/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, but the duplicate becomes
   a new function rather than being grafted into the blocks of an
   existing function as with inlining.  Some parameters will become
   constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

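/* Illustrative sketch (a hypothetical example added for exposition,
   not original to this file).  Inlining

     int callee (int x) { return x + 1; }
     ...
     y = callee (3);

   duplicates the body of `callee', remaps the PARM_DECL `x' into a new
   caller-side VAR_DECL initialized from the argument 3, and rewrites
   the RETURN_EXPR into an assignment to a dedicated return variable
   that then feeds `y'; the branch back to the caller is realized as a
   CFG edge rather than a statement.  */
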
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

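/* A minimal sketch (hypothetical caller, not code from this file) of
   the identity-map invariant above: once OLD maps to NEW, NEW maps to
   itself, so re-encountering the copy during a later walk is harmless.

     insert_decl_map (id, old_decl, new_decl);
     n = (tree *) pointer_map_contains (id->decl_map, new_decl);
     gcc_assert (*n == new_decl);  */
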
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node in the
     case the return value is just partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function having an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE in the case we end up extending the
	     lifetime of an SSA name across an abnormal edge, and also
	     increases register pressure.

	     We simply initialize all uninitialized vars with 0, except
	     when we are inlining into the very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

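/* An illustrative sketch (hypothetical source, not from this file) of
   the zero-initialization above: inlining

     int callee (void) { int x; return x; }

   copies the default definition of `x'.  When the inlined body does not
   land in the caller's very first BB, the new name gets an explicit
   `x_N = 0' in the entry block of the inlined region, trading the
   undefined value for a well-defined one rather than extending an
   undefined lifetime across abnormal edges.  */
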
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If this is a FIELD_DECL, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

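/* A sketch of why DECL_SIZE must be walked above (hypothetical source,
   not from this file): for a local VLA in an inlined body,

     void callee (int n) { char buf[n]; ... }

   the copied `buf' has a DECL_SIZE expression that still refers to the
   callee's `n'; walking it with copy_tree_body_r rewrites that
   reference to the caller-side copy of `n'.  */
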
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

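/* A sketch (hypothetical source, not from this file): only variably
   modified types are remapped above.  For

     void callee (int n) { typedef int row[n]; row *p; ... }

   the ARRAY_TYPE `row' has a TYPE_SIZE that mentions `n', so
   remap_type_1 rebuilds it against the caller-side copy of `n';
   a plain `int[10]' is entered into the map unchanged.  */
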
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

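/* A minimal sketch (hypothetical, not code from this file) of what
   remap_blocks produces: for a callee whose lexical scopes nest as

     BLOCK A
       BLOCK B
       BLOCK C

   the copy is a fresh BLOCK A' with subblocks B' and C' in the same
   order, each new block carrying remapped BLOCK_VARS and pointing back
   at its original via BLOCK_ABSTRACT_ORIGIN.  */
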
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
	gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type,
			     ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  But make sure
	     to not improperly set TREE_BLOCK on some sub-expressions.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  tree block = id->block;
	  id->block = NULL_TREE;
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  id->block = block;
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

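/* An illustrative sketch (hypothetical source, not from this file) of
   the MEM_REF re-canonicalization above: inlining

     void callee (int *p) { *p = 1; }
     ...
     callee (&x);

   substitutes `&x' for `p', leaving the equivalent of `*&x = 1';
   re-folding the MEM_REF against the remapped pointer canonicalizes
   this back to a direct store `x = 1'.  */
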
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree old = *tp;
	      *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
				 unshare_expr (*n), TREE_OPERAND (*tp, 1));
	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      gcc_assert (n || id->remapping_type_depth != 0);
	      if (n)
		new_block = *n;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

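/* A sketch (hypothetical source, not from this file) of the VAR = VAR
   bogosity handling above: if parameter `x' was remapped to the
   constant 0, an inlined self-assignment

     x = x;

   would degenerate into `0 = 0'; the walk replaces it with an empty
   statement instead.  */
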
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  copy = gimple_build_debug_source_bind
		   (gimple_debug_source_bind_get_var (stmt),
		    gimple_debug_source_bind_get_value (stmt), stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

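/* A GIMPLE-level illustration (hypothetical, not from this file) of
   the GIMPLE_RETURN rewrite above: with id->retvar being the
   caller-side temporary `D.1234', the callee statement

     return x_3;

   is remapped to

     D.1234 = x_3;

   and the branch back to the caller is materialized later as a CFG
   edge rather than as a statement.  */
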
e21aff8a
SB
1489/* Copy basic block, scale profile accordingly. Edges will be taken care of
1490 later */
1491
1492static basic_block
0178d644
VR
1493copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1494 gcov_type count_scale)
e21aff8a 1495{
c2a4718a 1496 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
e21aff8a 1497 basic_block copy_basic_block;
726a989a 1498 tree decl;
0d63a740 1499 gcov_type freq;
91382288
JH
1500 basic_block prev;
1501
1502 /* Search for previous copied basic block. */
  prev = bb->prev_bb;
  while (!prev->aux)
    prev = prev->prev_bb;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) prev->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false,
                                              GSI_CONTINUE_LINKING);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, so we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                      + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }
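
          /* For example, given the hypothetical caller code

               static inline int f (int a, ...)
               {
                 return g (a, __builtin_va_arg_pack ());
               }

             inlining the call f (1, 2, 3) rewrites the inner call as
             g (1, 2, 3), and a __builtin_va_arg_pack_len () in f's body
             would be replaced by the constant 2, the number of anonymous
             arguments at this call site.  */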

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch for
             nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverge from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on arguments done during inlining
                 may create a new direct call.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && id->dst_node->analyzed
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_get_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
                     direct call by constant propagating arguments or we are
                     producing a dead clone (for further cloning).  In all
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (!dest->analyzed
                              || dest->symbol.address_taken
                              || !id->src_node->analyzed
                              || !id->dst_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
                      (id->dst_node, dest, orig_stmt, stmt, bb->count,
                       compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
                                                       copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    cgraph_create_edge (id->dst_node, dest, stmt,
                                        bb->count,
                                        compute_call_stmt_bb_frequency
                                          (id->dst_node->symbol.decl,
                                           copy_basic_block))->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
                      fprintf (dump_file, "Created new direct edge to %s\n",
                               cgraph_node_name (dest));
                    }
                }

              flags = gimple_call_flags (stmt);
              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
                                      id->eh_map, id->eh_lp_nr);

          if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
            {
              ssa_op_iter i;
              tree def;

              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}
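
/* A worked example of the scaling done in copy_bb above (illustrative
   numbers only, with REG_BR_PROB_BASE taken as 10000): inlining a callee
   whose entry count is 400 into a call site executed 100 times gives
   count_scale == 10000 * 100 / 400 == 2500, so a callee block with
   count 200 is copied with count 200 * 2500 / 10000 == 50.  Block
   frequencies are scaled the same way and are recomputed after
   inlining anyway.  */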

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges that go from basic blocks within the inlined
   function to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (virtual_operand_p (PHI_RESULT (phi)))
              {
                mark_virtual_operands_for_renaming (cfun);
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assumes the aux
   pointers point to the copies of each BB.  Return true if any
   debug stmts are left after a statement that must end the basic block.  */

static bool
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;
  bool need_debug_cleanup = false;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return false;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
        update_stmt (copy_stmt);

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            {
              while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
                gsi_next (&si);
              if (gsi_end_p (si))
                need_debug_cleanup = true;
            }
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
        make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
  return need_debug_cleanup;
}
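
/* A sketch of the splitting above (hypothetical GIMPLE): if a copied
   block contains

     tmp_1 = may_throw ();
     use (tmp_1);

   and the call can throw internally, the block is split right after the
   call so that the EH edge created by make_eh_edges leaves a block that
   ends with the potentially-throwing statement, as the CFG invariants
   require; the trailing statements continue in the new half-block.  */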

/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;
  edge new_edge;
  bool inserted = false;

  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (!virtual_operand_p (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg;
              tree new_arg;
              tree block = id->block;
              edge_iterator ei2;

              /* When doing partial cloning, we allow PHIs on the entry
                 block as long as all the arguments are the same.  Find any
                 input edge to see the argument to copy.  */
              if (!old_edge)
                FOR_EACH_EDGE (old_edge, ei2, bb->preds)
                  if (!old_edge->src->aux)
                    break;

              arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              new_arg = arg;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge (new_edge, stmts);
                  inserted = true;
                }
              add_phi_arg (new_phi, new_arg, new_edge,
                           gimple_phi_arg_location_from_edge (phi, old_edge));
            }
        }
    }

  /* Commit the delayed edge insertions.  */
  if (inserted)
    FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
      gsi_commit_one_edge_insert (new_edge, NULL);
}

/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->can_throw_non_call_exceptions
    = src_cfun->can_throw_non_call_exceptions;
  cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;

  init_empty_tree_cfg ();

  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands (cfun);
    }
  pop_cfun ();
}

/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If
   the successor has multiple predecessors, reset the bound values;
   otherwise keep them.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);

  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
           || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      while (is_gimple_debug (gsi_stmt (ssi)))
        {
          gimple stmt = gsi_stmt (ssi), new_stmt;
          tree var;
          tree value;

          /* For the last edge move the debug stmts instead of copying
             them.  */
          if (ei_one_before_end_p (ei))
            {
              si = ssi;
              gsi_prev (&ssi);
              if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
                gimple_debug_bind_reset_value (stmt);
              gsi_remove (&si, false);
              gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
              continue;
            }

          if (gimple_debug_bind_p (stmt))
            {
              var = gimple_debug_bind_get_var (stmt);
              if (single_pred_p (e->dest))
                {
                  value = gimple_debug_bind_get_value (stmt);
                  value = unshare_expr (value);
                }
              else
                value = NULL_TREE;
              new_stmt = gimple_build_debug_bind (var, value, stmt);
            }
          else if (gimple_debug_source_bind_p (stmt))
            {
              var = gimple_debug_source_bind_get_var (stmt);
              value = gimple_debug_source_bind_get_value (stmt);
              new_stmt = gimple_build_debug_source_bind (var, value, stmt);
            }
          else
            gcc_unreachable ();
          gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
          VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
          gsi_prev (&ssi);
        }
    }
}
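
/* For instance (hypothetical GIMPLE), if a copied block ends up as

     x_1 = may_throw ();
     # DEBUG i => x_1

   the debug bind cannot legally follow the block-ending statement, so it
   is moved (or copied) to the start of each successor; on a successor
   with several predecessors the bound value is reset, since it is no
   longer known to hold on every incoming path.  */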

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data *id, gcov_type count, int frequency_scale,
               basic_block entry_block_map, basic_block exit_block_map,
               bitmap blocks_to_copy, basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;
  int incoming_frequency = 0;
  gcov_type incoming_count = 0;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just a region of the function, make sure to connect
     the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a
     loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
     based on the frequencies and probabilities of edges incoming from the
     nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, new_entry->preds)
        if (!e->src->aux)
          {
            incoming_frequency += EDGE_FREQUENCY (e);
            incoming_count += e->count;
          }
      incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
      incoming_frequency
        = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
      ENTRY_BLOCK_PTR->count = incoming_count;
      ENTRY_BLOCK_PTR->frequency = incoming_frequency;
    }

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
                                       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to their copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
      {
        basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
        bb->aux = new_bb;
        new_bb->aux = bb;
      }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!blocks_to_copy
        || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
      e->probability = REG_BR_PROB_BASE;
      e->count = incoming_count;
    }

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!blocks_to_copy
          || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
        copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
        if (need_debug_cleanup
            && bb->index != ENTRY_BLOCK
            && bb->index != EXIT_BLOCK)
          maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
        ((basic_block)bb->aux)->aux = NULL;
        bb->aux = NULL;
      }

  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    {
      if (need_debug_cleanup)
        maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
      BASIC_BLOCK (last)->aux = NULL;
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}

/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  t = id->block;
  if (gimple_block (stmt))
    {
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      if (n)
        t = *n;
    }
  gimple_set_block (stmt, t);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else
    t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
           && !is_global_var (t)
           && !pointer_map_contains (id->decl_map, t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
        walk_tree (gimple_debug_bind_get_value_ptr (stmt),
                   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
        gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
                 remap_gimple_op_r, &wi, NULL);
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}

/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gimple stmt;

  if (!id->debug_stmts)
    return;

  FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
    copy_debug_stmt (stmt, id);

  VEC_free (gimple, heap, id->debug_stmts);
}
2384
f82a627c
EB
2385/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2386 another function. */
2387
2388static tree
2389copy_tree_body (copy_body_data *id)
2390{
2391 tree fndecl = id->src_fn;
2392 tree body = DECL_SAVED_TREE (fndecl);
2393
2394 walk_tree (&body, copy_tree_body_r, id, NULL);
2395
2396 return body;
2397}
2398

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
           basic_block entry_block_map, basic_block exit_block_map,
           bitmap blocks_to_copy, basic_block new_entry)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
                        blocks_to_copy, new_entry);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}

/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
                        basic_block bb, tree var, tree value,
                        gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
        base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
        gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
        gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
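
/* The note built above shows up in GIMPLE dumps as, e.g.,

     # DEBUG var => value

   and later feeds var-tracking, so an inlined parameter stays visible
   to the debugger even when no real initialization statement is emitted
   for it.  */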
/* Insert INIT_STMT, which initializes an inlined parameter or return
   temporary, at the end of BB, and emit a matching debug bind when
   debug info is wanted.  */

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
         rhs into a temporary.  gimple_regimplify_operands is not
         prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
          && !is_gimple_reg (gimple_assign_lhs (init_stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
          && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
        {
          tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
                             gimple_expr_type (init_stmt),
                             gimple_assign_rhs1 (init_stmt));
          rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
                                          GSI_NEW_STMT);
          gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
          gimple_assign_set_rhs1 (init_stmt, rhs);
        }
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
        {
          tree def = gimple_assign_lhs (init_stmt);
          insert_init_debug_bind (id, bb, def, def, init_stmt);
        }
    }
}

/* Initialize parameter P with VALUE.  If needed, produce an init
   statement at the end of BB.  When BB is NULL, we return the init
   statement to be output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
        rhs = fold_convert (TREE_TYPE (p), value);
      else
        {
          /* ???  For valid programs we should not end up here.
             Still if we end up with truly mismatched types here, fall back
             to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
             GIMPLE to the following passes.  */
          if (!is_gimple_reg_type (TREE_TYPE (value))
              || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
            rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
          else
            rhs = build_zero_cst (TREE_TYPE (p));
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          insert_debug_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, value, NULL);
        }
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     of replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway as it might be
     used in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs
     requiring a constant argument to be visible in the inlined function
     body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
              && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of the argument is never used, don't care about
     initializing it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, rhs, NULL);
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
         or assign to a dummy SSA name if the parameter is unused and
         we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          if (def)
            {
              def = remap_ssa_name (def, id);
              init_stmt = gimple_build_assign (def, rhs);
              SSA_NAME_IS_DEFAULT_DEF (def) = 0;
              set_ssa_default_def (cfun, var, NULL);
            }
          else if (!optimize)
            {
              def = make_ssa_name (var, NULL);
              init_stmt = gimple_build_assign (def, rhs);
            }
        }
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
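
/* A sketch of what setup_one_parameter arranges (hypothetical source,
   assuming optimization is enabled): when

     static inline int sq (int x) { return x * x; }

   is inlined at sq (5), the PARM_DECL x is mapped to a fresh local
   VAR_DECL, and because 5 is a gimple invariant the default-definition
   SSA name of x is mapped directly to the constant, so uses of x in the
   copied body become 5 with no init statement emitted, and later
   folding reduces the inlined body to 25.  */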

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
                               tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
      if (varp
          && TREE_CODE (*varp) == VAR_DECL)
        {
          tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
                      ? ssa_default_def (id->src_cfun, p) : NULL);
          tree var = *varp;
          TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
          /* Also remap the default definition if it was remapped
             to the default definition of the parameter replacement
             by the parameter setup.  */
          if (def)
            {
              tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
              if (defp
                  && TREE_CODE (*defp) == SSA_NAME
                  && SSA_NAME_VAR (*defp) == var)
                TREE_TYPE (*defp) = TREE_TYPE (var);
            }
        }
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
                         basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        {
          tree return_slot_addr = build_fold_addr_expr (return_slot);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

          /* We are going to construct *&return_slot and we can't do that
             for variables believed to be not addressable.

             FIXME: This check possibly can match, because values returned
             via return slot optimization are not believed to have address
             taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          var = return_slot_addr;
        }
      else
        {
          var = return_slot;
          gcc_assert (TREE_CODE (var) != SSA_NAME);
          TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
        }
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
        use = fold_convert (caller_type, var);
      else
        {
          /* ???  For valid programs we should not end up here.
             Still if we end up with truly mismatched types here, fall back
             to using a MEM_REF to not leak invalid GIMPLE to the following
             passes.  */
          /* Prevent var from being written into SSA form.  */
          if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
              || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
            DECL_GIMPLE_REG_P (var) = false;
          else if (is_gimple_reg_type (TREE_TYPE (var)))
            TREE_ADDRESSABLE (var) = true;
          use = fold_build2 (MEM_REF, caller_type,
                             build_fold_addr_expr (var),
                             build_int_cst (ptr_type_node, 0));
        }
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
         its default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
          && is_gimple_reg (result))
        {
          temp = make_ssa_name (temp, NULL);
          insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
        }
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}
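
/* Two common shapes of the mapping above (hypothetical caller code):

     y = f ();           MODIFY_DEST is y; when the callee cannot modify
                         or promote y, the callee's RESULT_DECL is
                         remapped straight to y and no temporary is made.

     struct S s = f ();  with the return-slot optimization, RETURN_SLOT
                         is s, and the RESULT_DECL is remapped to s (or
                         to *&s when f returns by reference).  */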

/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree decl;
  unsigned ix;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
                  "because it receives a non-local goto");
      goto fail;
    }

  FOR_EACH_LOCAL_DECL (fun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && TREE_STATIC (decl)
        && !DECL_EXTERNAL (decl)
        && DECL_INITIAL (decl)
        && walk_tree_without_duplicates (&DECL_INITIAL (decl),
                                         has_label_address_in_static_1,
                                         fndecl))
      {
        reason = G_("function %q+F can never be copied because it saves "
                    "address of local label in a static variable");
        goto fail;
      }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}
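
/* Example of source that trips the second check above (hypothetical
   user code):

     void f (void)
     {
     lab:;
       static void *p = &&lab;   -- address of a local label stored
                                    in a static variable
       ...
     }

   Copying f would leave p pointing into the original body, so the
   function is marked as never copyable.  */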
3005static const char *inline_forbidden_reason;
3006
3007/* A callback for walk_gimple_seq to handle statements. Returns non-null
3008 iff a function can not be inlined. Also sets the reason why. */
c986baf6 3009
c986baf6 3010static tree
726a989a
RB
3011inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3012 struct walk_stmt_info *wip)
c986baf6 3013{
726a989a 3014 tree fn = (tree) wip->info;
f08545a8 3015 tree t;
726a989a 3016 gimple stmt = gsi_stmt (*gsi);
c986baf6 3017
726a989a 3018 switch (gimple_code (stmt))
f08545a8 3019 {
726a989a 3020 case GIMPLE_CALL:
3197c4fd
AS
3021 /* Refuse to inline alloca call unless user explicitly forced so as
3022 this may change program's memory overhead drastically when the
3023 function using alloca is called in loop. In GCC present in
3024 SPEC2000 inlining into schedule_block cause it to require 2GB of
63d2a353
MM
3025 RAM instead of 256MB. Don't do so for alloca calls emitted for
3026 VLA objects as those can't cause unbounded growth (they're always
3027 wrapped inside stack_save/stack_restore regions. */
726a989a 3028 if (gimple_alloca_call_p (stmt)
63d2a353 3029 && !gimple_call_alloca_for_var_p (stmt)
f08545a8
JH
3030 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3031 {
ddd2d57e 3032 inline_forbidden_reason
dee15844 3033 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 3034 "alloca (override using the always_inline attribute)");
726a989a
RB
3035 *handled_ops_p = true;
3036 return fn;
f08545a8 3037 }
726a989a
RB
3038
3039 t = gimple_call_fndecl (stmt);
3040 if (t == NULL_TREE)
f08545a8 3041 break;
84f5e1b1 3042
f08545a8
JH
3043 /* We cannot inline functions that call setjmp. */
3044 if (setjmp_call_p (t))
3045 {
ddd2d57e 3046 inline_forbidden_reason
dee15844 3047 = G_("function %q+F can never be inlined because it uses setjmp");
726a989a
RB
3048 *handled_ops_p = true;
3049 return t;
f08545a8
JH
3050 }
3051
6de9cd9a 3052 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 3053 switch (DECL_FUNCTION_CODE (t))
f08545a8 3054 {
3197c4fd
AS
3055 /* We cannot inline functions that take a variable number of
3056 arguments. */
3057 case BUILT_IN_VA_START:
3197c4fd
AS
3058 case BUILT_IN_NEXT_ARG:
3059 case BUILT_IN_VA_END:
6de9cd9a 3060 inline_forbidden_reason
dee15844 3061 = G_("function %q+F can never be inlined because it "
6de9cd9a 3062 "uses variable argument lists");
726a989a
RB
3063 *handled_ops_p = true;
3064 return t;
6de9cd9a 3065
3197c4fd 3066 case BUILT_IN_LONGJMP:
6de9cd9a
DN
3067 /* We can't inline functions that call __builtin_longjmp at
3068 all. The non-local goto machinery really requires the
3069 destination be in a different function. If we allow the
3070 function calling __builtin_longjmp to be inlined into the
3071 function calling __builtin_setjmp, Things will Go Awry. */
3072 inline_forbidden_reason
dee15844 3073 = G_("function %q+F can never be inlined because "
6de9cd9a 3074 "it uses setjmp-longjmp exception handling");
726a989a
RB
3075 *handled_ops_p = true;
3076 return t;
6de9cd9a
DN
3077
3078 case BUILT_IN_NONLOCAL_GOTO:
3079 /* Similarly. */
3080 inline_forbidden_reason
dee15844 3081 = G_("function %q+F can never be inlined because "
6de9cd9a 3082 "it uses non-local goto");
726a989a
RB
3083 *handled_ops_p = true;
3084 return t;
f08545a8 3085
4b284111
JJ
3086 case BUILT_IN_RETURN:
3087 case BUILT_IN_APPLY_ARGS:
3088 /* If a __builtin_apply_args caller would be inlined,
3089 it would be saving arguments of the function it has
3090 been inlined into. Similarly __builtin_return would
3091 return from the function the inline has been inlined into. */
3092 inline_forbidden_reason
dee15844 3093 = G_("function %q+F can never be inlined because "
4b284111 3094 "it uses __builtin_return or __builtin_apply_args");
726a989a
RB
3095 *handled_ops_p = true;
3096 return t;
4b284111 3097
3197c4fd
AS
3098 default:
3099 break;
3100 }
f08545a8
JH
3101 break;
3102
726a989a
RB
3103 case GIMPLE_GOTO:
3104 t = gimple_goto_dest (stmt);
f08545a8
JH
3105
3106 /* We will not inline a function which uses computed goto. The
3107 addresses of its local labels, which may be tucked into
3108 global storage, are of course not constant across
3109 instantiations, which causes unexpected behavior. */
3110 if (TREE_CODE (t) != LABEL_DECL)
3111 {
ddd2d57e 3112 inline_forbidden_reason
dee15844 3113 = G_("function %q+F can never be inlined "
ddd2d57e 3114 "because it contains a computed goto");
726a989a
RB
3115 *handled_ops_p = true;
3116 return t;
f08545a8 3117 }
6de9cd9a 3118 break;
f08545a8 3119
f08545a8
JH
3120 default:
3121 break;
3122 }
3123
726a989a 3124 *handled_ops_p = false;
f08545a8 3125 return NULL_TREE;
84f5e1b1
RH
3126}
3127
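/* Illustrative examples (not part of GCC; all names invented): each
   function below trips one of the checks above, so compiling callers
   of these with "gcc -O2 -Winline" should report that the function
   can never be inlined, quoting the corresponding reason.  */

#include <setjmp.h>
#include <stdarg.h>

static jmp_buf env;

static inline char
uses_alloca (unsigned n)	/* "uses alloca" */
{
  char *p = __builtin_alloca (n + 1);
  p[0] = 'x';
  return p[0];
}

static inline int
uses_setjmp (void)		/* "uses setjmp" */
{
  return setjmp (env);
}

static inline int
sums_varargs (int n, ...)	/* "uses variable argument lists" */
{
  va_list ap;
  int s = 0;
  va_start (ap, n);
  while (n-- > 0)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}

static inline int
dispatches (int i)		/* "contains a computed goto" */
{
  static void *tab[] = { &&even, &&odd };
  goto *tab[i & 1];
 even: return 0;
 odd:  return 1;
}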
726a989a
RB
3128/* Return true if FNDECL is a function that cannot be inlined into
3129 another one. */
3130
3131static bool
f08545a8 3132inline_forbidden_p (tree fndecl)
84f5e1b1 3133{
2092ee7d 3134 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
726a989a
RB
3135 struct walk_stmt_info wi;
3136 struct pointer_set_t *visited_nodes;
3137 basic_block bb;
3138 bool forbidden_p = false;
3139
27dbd3ac
RH
3140 /* First check for shared reasons not to copy the code. */
3141 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3142 if (inline_forbidden_reason != NULL)
3143 return true;
3144
3145 /* Next, walk the statements of the function looking for
3146 constructs we cannot handle, or that are non-optimal for inlining. */
726a989a
RB
3147 visited_nodes = pointer_set_create ();
3148 memset (&wi, 0, sizeof (wi));
3149 wi.info = (void *) fndecl;
3150 wi.pset = visited_nodes;
e21aff8a 3151
2092ee7d 3152 FOR_EACH_BB_FN (bb, fun)
726a989a
RB
3153 {
3154 gimple ret;
3155 gimple_seq seq = bb_seq (bb);
27dbd3ac 3156 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
726a989a
RB
3157 forbidden_p = (ret != NULL);
3158 if (forbidden_p)
27dbd3ac 3159 break;
2092ee7d
JJ
3160 }
3161
726a989a 3162 pointer_set_destroy (visited_nodes);
726a989a 3163 return forbidden_p;
84f5e1b1 3164}
6399c0ab
SB
3165\f
3166/* Return false if the function FNDECL cannot be inlined on account of its
3167 attributes, true otherwise. */
3168static bool
3169function_attribute_inlinable_p (const_tree fndecl)
3170{
3171 if (targetm.attribute_table)
3172 {
3173 const_tree a;
3174
3175 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3176 {
3177 const_tree name = TREE_PURPOSE (a);
3178 int i;
3179
3180 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3181 if (is_attribute_p (targetm.attribute_table[i].name, name))
3182 return targetm.function_attribute_inlinable_p (fndecl);
3183 }
3184 }
3185
3186 return true;
3187}
84f5e1b1 3188
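/* A minimal sketch (hypothetical target code, not from any real
   backend) of the hook consulted above.  A target whose attribute
   table contains "naked" might veto inlining of decls that carry it,
   since a naked function's body cannot be merged into a caller:  */

static bool
example_function_attribute_inlinable_p (const_tree fndecl)
{
  /* Inlinable unless the decl is marked "naked".  */
  return lookup_attribute ("naked", DECL_ATTRIBUTES (fndecl)) == NULL_TREE;
}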
b3c3af2f
SB
3189/* Returns nonzero if FN is a function that does not have any
3190 fundamental inline blocking properties. */
d4e4baa9 3191
27dbd3ac
RH
3192bool
3193tree_inlinable_function_p (tree fn)
d4e4baa9 3194{
b3c3af2f 3195 bool inlinable = true;
18177c7e
RG
3196 bool do_warning;
3197 tree always_inline;
d4e4baa9
AO
3198
3199 /* If we've already decided this function shouldn't be inlined,
3200 there's no need to check again. */
3201 if (DECL_UNINLINABLE (fn))
b3c3af2f 3202 return false;
d4e4baa9 3203
18177c7e
RG
3204 /* We only warn for functions declared `inline' by the user. */
3205 do_warning = (warn_inline
18177c7e 3206 && DECL_DECLARED_INLINE_P (fn)
0494626a 3207 && !DECL_NO_INLINE_WARNING_P (fn)
18177c7e
RG
3208 && !DECL_IN_SYSTEM_HEADER (fn));
3209
3210 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3211
e90acd93 3212 if (flag_no_inline
18177c7e
RG
3213 && always_inline == NULL)
3214 {
3215 if (do_warning)
3216 warning (OPT_Winline, "function %q+F can never be inlined because it "
3217 "is suppressed using -fno-inline", fn);
3218 inlinable = false;
3219 }
3220
18177c7e
RG
3221 else if (!function_attribute_inlinable_p (fn))
3222 {
3223 if (do_warning)
3224 warning (OPT_Winline, "function %q+F can never be inlined because it "
3225 "uses attributes conflicting with inlining", fn);
3226 inlinable = false;
3227 }
46c5ad27 3228
f08545a8 3229 else if (inline_forbidden_p (fn))
b3c3af2f
SB
3230 {
3231 /* See if we should warn about uninlinable functions. Previously,
3232 some of these warnings would be issued while trying to expand
3233 the function inline, but that would cause multiple warnings
3234 about functions that would for example call alloca. But since
3235 this is a property of the function, just one warning is enough.
3236 As a bonus we can now give more details about the reason why a
18177c7e
RG
3237 function is not inlinable. */
3238 if (always_inline)
c9fc06dc 3239 error (inline_forbidden_reason, fn);
2d327012 3240 else if (do_warning)
d2fcbf6f 3241 warning (OPT_Winline, inline_forbidden_reason, fn);
b3c3af2f
SB
3242
3243 inlinable = false;
3244 }
d4e4baa9
AO
3245
3246 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 3247 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 3248
b3c3af2f
SB
3249 return inlinable;
3250}
3251
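/* Illustrative example (not part of GCC; names invented) of the
   always_inline branch above.  Unlike the alloca check, the setjmp
   check has no always_inline escape hatch, so the request below can
   never be honored and inline_forbidden_reason is reported through
   error () rather than a mere -Winline warning.  */

#include <setjmp.h>

static jmp_buf catch_buf;

static inline __attribute__ ((always_inline)) int
must_inline (void)
{
  return setjmp (catch_buf);
}

int
try_it (void)
{
  return must_inline ();  /* error: inlining failed in call to always_inline */
}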
e5c4f28a
RG
3252/* Estimate the cost of a memory move. Use machine dependent
3253 word size and take possible memcpy call into account. */
3254
3255int
3256estimate_move_cost (tree type)
3257{
3258 HOST_WIDE_INT size;
3259
078c3644
JH
3260 gcc_assert (!VOID_TYPE_P (type));
3261
c204d113
L
3262 if (TREE_CODE (type) == VECTOR_TYPE)
3263 {
3264 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3265 enum machine_mode simd
3266 = targetm.vectorize.preferred_simd_mode (inner);
3267 int simd_mode_size = GET_MODE_SIZE (simd);
3268 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3269 / simd_mode_size);
3270 }
3271
e5c4f28a
RG
3272 size = int_size_in_bytes (type);
3273
e04ad03d 3274 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
e5c4f28a
RG
3275 /* Cost of a memcpy call, 3 arguments and the call. */
3276 return 4;
3277 else
3278 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3279}
3280
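/* A standalone sketch (illustrative, not part of GCC) of the formula
   above, with assumed machine parameters MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4, roughly an x86-64-like target.  A 16-byte type
   costs (16 + 8 - 1) / 8 == 2 units, while a 40-byte type exceeds
   8 * 4 == 32 bytes and is charged the flat memcpy cost of 4 units
   (three argument moves plus the call itself).  */

static int
example_move_cost (long size_in_bytes)
{
  const long move_max_pieces = 8;	/* assumed widest piecewise move */
  const long move_ratio = 4;		/* assumed inline-move threshold */

  if (size_in_bytes < 0 || size_in_bytes > move_max_pieces * move_ratio)
    return 4;				/* memcpy: 3 arguments + the call */
  return (size_in_bytes + move_max_pieces - 1) / move_max_pieces;
}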
726a989a 3281/* Returns the cost of operation CODE, according to WEIGHTS. */
7f9bc51b 3282
726a989a 3283static int
02f0b13a
JH
3284estimate_operator_cost (enum tree_code code, eni_weights *weights,
3285 tree op1 ATTRIBUTE_UNUSED, tree op2)
6de9cd9a 3286{
726a989a 3287 switch (code)
6de9cd9a 3288 {
726a989a
RB
3289 /* These are "free" conversions, or their presumed cost
3290 is folded into other operations. */
61fcaeec 3291 case RANGE_EXPR:
1a87cf0c 3292 CASE_CONVERT:
726a989a
RB
3293 case COMPLEX_EXPR:
3294 case PAREN_EXPR:
d4d92cd3 3295 case VIEW_CONVERT_EXPR:
726a989a 3296 return 0;
6de9cd9a 3297
e5c4f28a
RG
3298 /* Assign cost of 1 to usual operations.
3299 ??? We may consider mapping RTL costs to this. */
6de9cd9a 3300 case COND_EXPR:
4151978d 3301 case VEC_COND_EXPR:
2205ed25 3302 case VEC_PERM_EXPR:
6de9cd9a
DN
3303
3304 case PLUS_EXPR:
5be014d5 3305 case POINTER_PLUS_EXPR:
6de9cd9a
DN
3306 case MINUS_EXPR:
3307 case MULT_EXPR:
98449720 3308 case MULT_HIGHPART_EXPR:
16949072 3309 case FMA_EXPR:
6de9cd9a 3310
09e881c9 3311 case ADDR_SPACE_CONVERT_EXPR:
325217ed 3312 case FIXED_CONVERT_EXPR:
6de9cd9a 3313 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3314
3315 case NEGATE_EXPR:
3316 case FLOAT_EXPR:
3317 case MIN_EXPR:
3318 case MAX_EXPR:
3319 case ABS_EXPR:
3320
3321 case LSHIFT_EXPR:
3322 case RSHIFT_EXPR:
3323 case LROTATE_EXPR:
3324 case RROTATE_EXPR:
a6b46ba2
DN
3325 case VEC_LSHIFT_EXPR:
3326 case VEC_RSHIFT_EXPR:
6de9cd9a
DN
3327
3328 case BIT_IOR_EXPR:
3329 case BIT_XOR_EXPR:
3330 case BIT_AND_EXPR:
3331 case BIT_NOT_EXPR:
3332
3333 case TRUTH_ANDIF_EXPR:
3334 case TRUTH_ORIF_EXPR:
3335 case TRUTH_AND_EXPR:
3336 case TRUTH_OR_EXPR:
3337 case TRUTH_XOR_EXPR:
3338 case TRUTH_NOT_EXPR:
3339
3340 case LT_EXPR:
3341 case LE_EXPR:
3342 case GT_EXPR:
3343 case GE_EXPR:
3344 case EQ_EXPR:
3345 case NE_EXPR:
3346 case ORDERED_EXPR:
3347 case UNORDERED_EXPR:
3348
3349 case UNLT_EXPR:
3350 case UNLE_EXPR:
3351 case UNGT_EXPR:
3352 case UNGE_EXPR:
3353 case UNEQ_EXPR:
d1a7edaf 3354 case LTGT_EXPR:
6de9cd9a 3355
6de9cd9a
DN
3356 case CONJ_EXPR:
3357
3358 case PREDECREMENT_EXPR:
3359 case PREINCREMENT_EXPR:
3360 case POSTDECREMENT_EXPR:
3361 case POSTINCREMENT_EXPR:
3362
16630a2c
DN
3363 case REALIGN_LOAD_EXPR:
3364
61d3cdbb
DN
3365 case REDUC_MAX_EXPR:
3366 case REDUC_MIN_EXPR:
3367 case REDUC_PLUS_EXPR:
20f06221 3368 case WIDEN_SUM_EXPR:
726a989a
RB
3369 case WIDEN_MULT_EXPR:
3370 case DOT_PROD_EXPR:
0354c0c7
BS
3371 case WIDEN_MULT_PLUS_EXPR:
3372 case WIDEN_MULT_MINUS_EXPR:
36ba4aae 3373 case WIDEN_LSHIFT_EXPR:
726a989a 3374
89d67cca
DN
3375 case VEC_WIDEN_MULT_HI_EXPR:
3376 case VEC_WIDEN_MULT_LO_EXPR:
3f30a9a6
RH
3377 case VEC_WIDEN_MULT_EVEN_EXPR:
3378 case VEC_WIDEN_MULT_ODD_EXPR:
89d67cca
DN
3379 case VEC_UNPACK_HI_EXPR:
3380 case VEC_UNPACK_LO_EXPR:
d9987fb4
UB
3381 case VEC_UNPACK_FLOAT_HI_EXPR:
3382 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 3383 case VEC_PACK_TRUNC_EXPR:
89d67cca 3384 case VEC_PACK_SAT_EXPR:
d9987fb4 3385 case VEC_PACK_FIX_TRUNC_EXPR:
36ba4aae
IR
3386 case VEC_WIDEN_LSHIFT_HI_EXPR:
3387 case VEC_WIDEN_LSHIFT_LO_EXPR:
98b44b0e 3388
726a989a 3389 return 1;
6de9cd9a 3390
1ea7e6ad 3391 /* A few special cases of expensive operations. This is useful
6de9cd9a
DN
 3392 to avoid inlining functions that contain too many of these. */
3393 case TRUNC_DIV_EXPR:
3394 case CEIL_DIV_EXPR:
3395 case FLOOR_DIV_EXPR:
3396 case ROUND_DIV_EXPR:
3397 case EXACT_DIV_EXPR:
3398 case TRUNC_MOD_EXPR:
3399 case CEIL_MOD_EXPR:
3400 case FLOOR_MOD_EXPR:
3401 case ROUND_MOD_EXPR:
3402 case RDIV_EXPR:
02f0b13a
JH
3403 if (TREE_CODE (op2) != INTEGER_CST)
3404 return weights->div_mod_cost;
3405 return 1;
726a989a
RB
3406
3407 default:
3408 /* We expect a copy assignment with no operator. */
3409 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3410 return 0;
3411 }
3412}
3413
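/* Illustrative example (not part of GCC) of the division special case
   above.  A constant divisor is charged 1 unit, since expansion will
   typically strength-reduce it; a variable divisor is charged
   weights->div_mod_cost, which init_inline_once below sets to 1 for
   the size weights but 10 for the time weights.  */

unsigned
div_by_constant (unsigned x)
{
  return x / 16;	/* INTEGER_CST divisor: cost 1 */
}

unsigned
div_by_variable (unsigned x, unsigned d)
{
  return x / d;		/* variable divisor: cost div_mod_cost */
}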
3414
3415/* Estimate number of instructions that will be created by expanding
3416 the statements in the statement sequence STMTS.
3417 WEIGHTS contains weights attributed to various constructs. */
3418
3419static int
3420estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3421{
3422 int cost;
3423 gimple_stmt_iterator gsi;
3424
3425 cost = 0;
3426 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3427 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3428
3429 return cost;
3430}
3431
3432
3433/* Estimate number of instructions that will be created by expanding STMT.
3434 WEIGHTS contains weights attributed to various constructs. */
3435
3436int
3437estimate_num_insns (gimple stmt, eni_weights *weights)
3438{
3439 unsigned cost, i;
3440 enum gimple_code code = gimple_code (stmt);
3441 tree lhs;
02f0b13a 3442 tree rhs;
726a989a
RB
3443
3444 switch (code)
3445 {
3446 case GIMPLE_ASSIGN:
3447 /* Try to estimate the cost of assignments. We have two cases to
3448 deal with:
3449 1) Simple assignments to registers;
3450 2) Stores to things that must live in memory. This includes
3451 "normal" stores to scalars, but also assignments of large
3452 structures, or constructors of big arrays;
3453
3454 Let us look at these two cases, assuming we have "a = b + C":
3455 <GIMPLE_ASSIGN <var_decl "a">
3456 <plus_expr <var_decl "b"> <constant C>>
3457 If "a" is a GIMPLE register, the assignment to it is free on almost
3458 any target, because "a" usually ends up in a real register. Hence
3459 the only cost of this expression comes from the PLUS_EXPR, and we
3460 can ignore the GIMPLE_ASSIGN.
3461 If "a" is not a GIMPLE register, the assignment to "a" will most
3462 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3463 of moving something into "a", which we compute using the function
3464 estimate_move_cost. */
bccc50d4
JJ
3465 if (gimple_clobber_p (stmt))
3466 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3467
726a989a 3468 lhs = gimple_assign_lhs (stmt);
02f0b13a
JH
3469 rhs = gimple_assign_rhs1 (stmt);
3470
726a989a
RB
3471 if (is_gimple_reg (lhs))
3472 cost = 0;
3473 else
3474 cost = estimate_move_cost (TREE_TYPE (lhs));
3475
02f0b13a
JH
3476 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3477 cost += estimate_move_cost (TREE_TYPE (rhs));
3478
3479 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3480 gimple_assign_rhs1 (stmt),
3481 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3482 == GIMPLE_BINARY_RHS
3483 ? gimple_assign_rhs2 (stmt) : NULL);
726a989a
RB
3484 break;
3485
3486 case GIMPLE_COND:
02f0b13a
JH
3487 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3488 gimple_op (stmt, 0),
3489 gimple_op (stmt, 1));
726a989a
RB
3490 break;
3491
3492 case GIMPLE_SWITCH:
3493 /* Take into account the cost of the switch, plus a guess of two
b8698a0f 3494 conditional jumps for each case label.
726a989a
RB
3495
3496 TODO: once the switch expansion logic is sufficiently separated, we can
3497 do a better job of estimating the cost of the switch. */
02f0b13a
JH
3498 if (weights->time_based)
3499 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3500 else
3501 cost = gimple_switch_num_labels (stmt) * 2;
6de9cd9a 3502 break;
726a989a
RB
3503
3504 case GIMPLE_CALL:
6de9cd9a 3505 {
726a989a 3506 tree decl = gimple_call_fndecl (stmt);
d2d668fb 3507 struct cgraph_node *node = NULL;
6de9cd9a 3508
9bb2f479
JH
3509 /* Do not special-case builtins whose body we can see.
3510 That would just confuse the inliner. */
9f9ebcdf 3511 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
e9f7ad79 3512 ;
9bb2f479
JH
3513 /* For builtins that are likely to be expanded to nothing or
3514 inlined, do not account operand costs. */
3515 else if (is_simple_builtin (decl))
bec922f0
SL
3516 return 0;
3517 else if (is_inexpensive_builtin (decl))
9bb2f479 3518 return weights->target_builtin_call_cost;
e9f7ad79
RG
3519 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3520 {
3521 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3522 specialize the cheap expansion we do here.
3523 ??? This asks for a more general solution. */
3524 switch (DECL_FUNCTION_CODE (decl))
3525 {
3526 case BUILT_IN_POW:
3527 case BUILT_IN_POWF:
3528 case BUILT_IN_POWL:
3529 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3530 && REAL_VALUES_EQUAL
3531 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3532 return estimate_operator_cost (MULT_EXPR, weights,
3533 gimple_call_arg (stmt, 0),
3534 gimple_call_arg (stmt, 0));
3535 break;
3536
3537 default:
3538 break;
3539 }
3540 }
b8698a0f 3541
d2d668fb 3542 cost = node ? weights->call_cost : weights->indirect_call_cost;
3c04921b
RG
3543 if (gimple_call_lhs (stmt))
3544 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3545 for (i = 0; i < gimple_call_num_args (stmt); i++)
c7f599d0 3546 {
3c04921b
RG
3547 tree arg = gimple_call_arg (stmt, i);
3548 cost += estimate_move_cost (TREE_TYPE (arg));
c7f599d0 3549 }
6de9cd9a
DN
3550 break;
3551 }
88f4034b 3552
9bb2f479
JH
3553 case GIMPLE_RETURN:
3554 return weights->return_cost;
3555
726a989a
RB
3556 case GIMPLE_GOTO:
3557 case GIMPLE_LABEL:
3558 case GIMPLE_NOP:
3559 case GIMPLE_PHI:
726a989a 3560 case GIMPLE_PREDICT:
b5b8b0ac 3561 case GIMPLE_DEBUG:
726a989a
RB
3562 return 0;
3563
3564 case GIMPLE_ASM:
2bd1d2c8 3565 return asm_str_count (gimple_asm_string (stmt));
726a989a 3566
1d65f45c
RH
3567 case GIMPLE_RESX:
3568 /* This is either going to be an external function call with one
3569 argument, or two register copy statements plus a goto. */
3570 return 2;
3571
3572 case GIMPLE_EH_DISPATCH:
3573 /* ??? This is going to turn into a switch statement. Ideally
3574 we'd have a look at the eh region and estimate the number of
3575 edges involved. */
3576 return 10;
3577
726a989a
RB
3578 case GIMPLE_BIND:
3579 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3580
3581 case GIMPLE_EH_FILTER:
3582 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3583
3584 case GIMPLE_CATCH:
3585 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3586
3587 case GIMPLE_TRY:
3588 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3589 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3590
3591 /* OpenMP directives are generally very expensive. */
3592
3593 case GIMPLE_OMP_RETURN:
3594 case GIMPLE_OMP_SECTIONS_SWITCH:
3595 case GIMPLE_OMP_ATOMIC_STORE:
3596 case GIMPLE_OMP_CONTINUE:
3597 /* ...except these, which are cheap. */
3598 return 0;
3599
3600 case GIMPLE_OMP_ATOMIC_LOAD:
3601 return weights->omp_cost;
3602
3603 case GIMPLE_OMP_FOR:
3604 return (weights->omp_cost
3605 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3606 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3607
3608 case GIMPLE_OMP_PARALLEL:
3609 case GIMPLE_OMP_TASK:
3610 case GIMPLE_OMP_CRITICAL:
3611 case GIMPLE_OMP_MASTER:
3612 case GIMPLE_OMP_ORDERED:
3613 case GIMPLE_OMP_SECTION:
3614 case GIMPLE_OMP_SECTIONS:
3615 case GIMPLE_OMP_SINGLE:
3616 return (weights->omp_cost
3617 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 3618
0a35513e
AH
3619 case GIMPLE_TRANSACTION:
3620 return (weights->tm_cost
3621 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3622 weights));
3623
6de9cd9a 3624 default:
1e128c5f 3625 gcc_unreachable ();
6de9cd9a 3626 }
726a989a
RB
3627
3628 return cost;
6de9cd9a
DN
3629}
3630
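/* Worked examples (illustrative, not part of GCC) for two of the
   cases above.

   GIMPLE_SWITCH: the 8-label switch below (7 cases plus the default)
   is charged 8 * 2 == 16 units under the size weights but only
   floor_log2 (8) * 2 == 6 units under the time weights -- expansion
   costs space per label, yet reaches a label in logarithmic depth.  */

int
eight_way (int x)
{
  switch (x & 7)
    {
    case 0: return 10;
    case 1: return 11;
    case 2: return 12;
    case 3: return 13;
    case 4: return 14;
    case 5: return 15;
    case 6: return 16;
    default: return 17;
    }
}

/* GIMPLE_CALL: with -ffast-math the middle end canonicalizes x * x to
   pow (x, 2.0), so the costing above charges a pow-by-2.0 call as the
   multiplication it will be expanded back into, not as a call.  */

double
square (double x)
{
  return __builtin_pow (x, 2.0);	/* costed like x * x */
}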
726a989a
RB
3631/* Estimate number of instructions that will be created by expanding
3632 function FNDECL. WEIGHTS contains weights attributed to various
3633 constructs. */
aa4a53af 3634
6de9cd9a 3635int
726a989a 3636estimate_num_insns_fn (tree fndecl, eni_weights *weights)
6de9cd9a 3637{
726a989a
RB
3638 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3639 gimple_stmt_iterator bsi;
e21aff8a 3640 basic_block bb;
726a989a 3641 int n = 0;
e21aff8a 3642
726a989a
RB
3643 gcc_assert (my_function && my_function->cfg);
3644 FOR_EACH_BB_FN (bb, my_function)
e21aff8a 3645 {
726a989a
RB
3646 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3647 n += estimate_num_insns (gsi_stmt (bsi), weights);
e21aff8a 3648 }
e21aff8a 3649
726a989a 3650 return n;
7f9bc51b
ZD
3651}
3652
726a989a 3653
7f9bc51b
ZD
3654/* Initializes weights used by estimate_num_insns. */
3655
3656void
3657init_inline_once (void)
3658{
7f9bc51b 3659 eni_size_weights.call_cost = 1;
d2d668fb 3660 eni_size_weights.indirect_call_cost = 3;
625a2efb 3661 eni_size_weights.target_builtin_call_cost = 1;
7f9bc51b 3662 eni_size_weights.div_mod_cost = 1;
7f9bc51b 3663 eni_size_weights.omp_cost = 40;
0a35513e 3664 eni_size_weights.tm_cost = 10;
02f0b13a 3665 eni_size_weights.time_based = false;
9bb2f479 3666 eni_size_weights.return_cost = 1;
7f9bc51b
ZD
3667
3668 /* Estimating time for call is difficult, since we have no idea what the
3669 called function does. In the current uses of eni_time_weights,
3670 underestimating the cost does less harm than overestimating it, so
ea2c620c 3671 we choose a rather small value here. */
7f9bc51b 3672 eni_time_weights.call_cost = 10;
d2d668fb 3673 eni_time_weights.indirect_call_cost = 15;
9bb2f479 3674 eni_time_weights.target_builtin_call_cost = 1;
7f9bc51b 3675 eni_time_weights.div_mod_cost = 10;
7f9bc51b 3676 eni_time_weights.omp_cost = 40;
0a35513e 3677 eni_time_weights.tm_cost = 40;
02f0b13a 3678 eni_time_weights.time_based = true;
9bb2f479 3679 eni_time_weights.return_cost = 2;
6de9cd9a
DN
3680}
3681
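/* Worked contrast (illustrative; reuses the MOVE_MAX_PIECES == 8
   assumption from the estimate_move_cost sketch above) of the two
   weight sets for a single statement "r = known_fn (i)" with int
   "r" and "i" and a callee whose body is known:

     size weights:  call_cost 1  + move (r) 1 + move (i) 1 == 3
     time weights:  call_cost 10 + move (r) 1 + move (i) 1 == 12

   The same statement is cheap to duplicate but comparatively expensive
   to execute, which is the asymmetry the two sets encode.  */

extern int known_fn (int);

int
use_it (int i)
{
  return known_fn (i);
}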
726a989a
RB
3682/* Estimate the number of instructions in a gimple_seq. */
3683
3684int
3685count_insns_seq (gimple_seq seq, eni_weights *weights)
3686{
3687 gimple_stmt_iterator gsi;
3688 int n = 0;
3689 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3690 n += estimate_num_insns (gsi_stmt (gsi), weights);
3691
3692 return n;
3693}
3694
3695
e21aff8a 3696/* Install new lexical TREE_BLOCK underneath 'current_block'. */
726a989a 3697
e21aff8a 3698static void
4a283090 3699prepend_lexical_block (tree current_block, tree new_block)
e21aff8a 3700{
4a283090
JH
3701 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3702 BLOCK_SUBBLOCKS (current_block) = new_block;
e21aff8a 3703 BLOCK_SUPERCONTEXT (new_block) = current_block;
e21aff8a
SB
3704}
3705
c021f10b
NF
3706/* Add local variables from CALLEE to CALLER. */
3707
3708static inline void
3709add_local_variables (struct function *callee, struct function *caller,
ae0379fc 3710 copy_body_data *id)
c021f10b
NF
3711{
3712 tree var;
3713 unsigned ix;
3714
3715 FOR_EACH_LOCAL_DECL (callee, ix, var)
ae0379fc 3716 if (!can_be_nonlocal (var, id))
42694189
JJ
3717 {
3718 tree new_var = remap_decl (var, id);
3719
3720 /* Remap debug-expressions. */
3721 if (TREE_CODE (new_var) == VAR_DECL
3722 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3723 && new_var != var)
3724 {
3725 tree tem = DECL_DEBUG_EXPR (var);
3726 bool old_regimplify = id->regimplify;
3727 id->remapping_type_depth++;
3728 walk_tree (&tem, copy_tree_body_r, id, NULL);
3729 id->remapping_type_depth--;
3730 id->regimplify = old_regimplify;
3731 SET_DECL_DEBUG_EXPR (new_var, tem);
3732 }
3733 add_local_decl (caller, new_var);
3734 }
c021f10b
NF
3735}
3736
726a989a 3737/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 3738
e21aff8a 3739static bool
726a989a 3740expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
d4e4baa9 3741{
0f900dfa 3742 tree use_retvar;
d436bff8 3743 tree fn;
b5b8b0ac 3744 struct pointer_map_t *st, *dst;
110cfe1c 3745 tree return_slot;
7740f00d 3746 tree modify_dest;
6de9cd9a 3747 location_t saved_location;
e21aff8a 3748 struct cgraph_edge *cg_edge;
61a05df1 3749 cgraph_inline_failed_t reason;
e21aff8a
SB
3750 basic_block return_block;
3751 edge e;
726a989a 3752 gimple_stmt_iterator gsi, stmt_gsi;
e21aff8a 3753 bool successfully_inlined = FALSE;
4f6c2131 3754 bool purge_dead_abnormal_edges;
d4e4baa9 3755
6de9cd9a
DN
3756 /* Set input_location here so we get the right instantiation context
3757 if we call instantiate_decl from inlinable_function_p. */
532aafad 3758 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
6de9cd9a 3759 saved_location = input_location;
035775c8 3760 input_location = gimple_location (stmt);
6de9cd9a 3761
d4e4baa9 3762 /* From here on, we're only interested in CALL_EXPRs. */
726a989a 3763 if (gimple_code (stmt) != GIMPLE_CALL)
6de9cd9a 3764 goto egress;
d4e4baa9 3765
db09f943
MJ
3766 cg_edge = cgraph_edge (id->dst_node, stmt);
3767 gcc_checking_assert (cg_edge);
d4e4baa9
AO
3768 /* First, see if we can figure out what function is being called.
3769 If we cannot, then there is no hope of inlining the function. */
db09f943 3770 if (cg_edge->indirect_unknown_callee)
3949c4a7 3771 goto egress;
960bfb69 3772 fn = cg_edge->callee->symbol.decl;
db09f943 3773 gcc_checking_assert (fn);
b58b1157 3774
726a989a 3775 /* If FN is a declaration of a function in a nested scope that was
a1a0fd4e
AO
3776 globally declared inline, we don't set its DECL_INITIAL.
3777 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3778 C++ front-end uses it for cdtors to refer to their internal
3779 declarations, which are not real functions. Fortunately those
3780 don't have trees to be saved, so we can tell by checking their
726a989a
RB
3781 gimple_body. */
3782 if (!DECL_INITIAL (fn)
a1a0fd4e 3783 && DECL_ABSTRACT_ORIGIN (fn)
39ecc018 3784 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
a1a0fd4e
AO
3785 fn = DECL_ABSTRACT_ORIGIN (fn);
3786
8f4f502f 3787 /* Don't try to inline functions that are not well-suited to inlining. */
9c8305f8 3788 if (cg_edge->inline_failed)
a833faa5 3789 {
9c8305f8 3790 reason = cg_edge->inline_failed;
3e293154
MJ
3791 /* If this call was originally indirect, we do not want to emit any
3792 inlining related warnings or sorry messages because there are no
3793 guarantees regarding those. */
e33c6cd6 3794 if (cg_edge->indirect_inlining_edge)
3e293154
MJ
3795 goto egress;
3796
7fac66d4
JH
3797 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3798 /* Avoid warnings during early inline pass. */
c9fc06dc
CB
3799 && cgraph_global_info_ready
3800 /* PR 20090218-1_0.c. Body can be provided by another module. */
3801 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
2d327012 3802 {
c9fc06dc
CB
3803 error ("inlining failed in call to always_inline %q+F: %s", fn,
3804 cgraph_inline_failed_string (reason));
3805 error ("called from here");
2d327012 3806 }
ff7037dc
EB
3807 else if (warn_inline
3808 && DECL_DECLARED_INLINE_P (fn)
3809 && !DECL_NO_INLINE_WARNING_P (fn)
2d327012 3810 && !DECL_IN_SYSTEM_HEADER (fn)
61a05df1 3811 && reason != CIF_UNSPECIFIED
d63db217 3812 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
d7d1d041
RG
3813 /* Do not warn about not inlined recursive calls. */
3814 && !cgraph_edge_recursive_p (cg_edge)
d63db217 3815 /* Avoid warnings during early inline pass. */
7e8b322a 3816 && cgraph_global_info_ready)
a833faa5 3817 {
dee15844 3818 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
49c8958b 3819 fn, _(cgraph_inline_failed_string (reason)));
3176a0c2 3820 warning (OPT_Winline, "called from here");
a833faa5 3821 }
6de9cd9a 3822 goto egress;
a833faa5 3823 }
960bfb69 3824 fn = cg_edge->callee->symbol.decl;
d4e4baa9 3825
18c6ada9 3826#ifdef ENABLE_CHECKING
960bfb69 3827 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
e21aff8a 3828 verify_cgraph_node (cg_edge->callee);
18c6ada9
JH
3829#endif
3830
e21aff8a 3831 /* We will be inlining this callee. */
1d65f45c 3832 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
e21aff8a 3833
f9417da1 3834 /* Update the callers EH personality. */
960bfb69
JH
3835 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
3836 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
3837 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
f9417da1 3838
726a989a 3839 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
3840 e = split_block (bb, stmt);
3841 bb = e->src;
3842 return_block = e->dest;
3843 remove_edge (e);
3844
4f6c2131
EB
3845 /* split_block splits after the statement; work around this by
3846 moving the call into the second block manually. Not pretty,
3847 but seems easier than doing the CFG manipulation by hand
726a989a
RB
3848 when the GIMPLE_CALL is the last statement of BB. */
3849 stmt_gsi = gsi_last_bb (bb);
3850 gsi_remove (&stmt_gsi, false);
4f6c2131 3851
726a989a 3852 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4f6c2131
EB
3853 been the source of abnormal edges. In this case, schedule
3854 the removal of dead abnormal edges. */
726a989a
RB
3855 gsi = gsi_start_bb (return_block);
3856 if (gsi_end_p (gsi))
e21aff8a 3857 {
726a989a 3858 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 3859 purge_dead_abnormal_edges = true;
e21aff8a 3860 }
4f6c2131
EB
3861 else
3862 {
726a989a 3863 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
3864 purge_dead_abnormal_edges = false;
3865 }
3866
726a989a 3867 stmt_gsi = gsi_start_bb (return_block);
742a37d5 3868
d436bff8
AH
3869 /* Build a block containing code to initialize the arguments, the
3870 actual inline expansion of the body, and a label for the return
3871 statements within the function to jump to. The type of the
3872 statement expression is the return type of the function call. */
e21aff8a
SB
3873 id->block = make_node (BLOCK);
3874 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3e2844cb 3875 BLOCK_SOURCE_LOCATION (id->block) = input_location;
4a283090 3876 prepend_lexical_block (gimple_block (stmt), id->block);
e21aff8a 3877
d4e4baa9
AO
3878 /* Local declarations will be replaced by their equivalents in this
3879 map. */
3880 st = id->decl_map;
6be42dd4 3881 id->decl_map = pointer_map_create ();
b5b8b0ac
AO
3882 dst = id->debug_map;
3883 id->debug_map = NULL;
d4e4baa9 3884
e21aff8a 3885 /* Record the function we are about to inline. */
1b369fae
RH
3886 id->src_fn = fn;
3887 id->src_node = cg_edge->callee;
110cfe1c 3888 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
726a989a 3889 id->gimple_call = stmt;
1b369fae 3890
3c8da8a5
AO
3891 gcc_assert (!id->src_cfun->after_inlining);
3892
045685a9 3893 id->entry_bb = bb;
7299cb99
JH
3894 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3895 {
3896 gimple_stmt_iterator si = gsi_last_bb (bb);
3897 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3898 NOT_TAKEN),
3899 GSI_NEW_STMT);
3900 }
726a989a 3901 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 3902
ea99e0be 3903 if (DECL_INITIAL (fn))
4a283090 3904 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
acb8f212 3905
d4e4baa9
AO
3906 /* Return statements in the function body will be replaced by jumps
3907 to the RET_LABEL. */
1e128c5f
GB
3908 gcc_assert (DECL_INITIAL (fn));
3909 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 3910
726a989a 3911 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 3912 return_slot = NULL;
726a989a 3913 if (gimple_call_lhs (stmt))
81bafd36 3914 {
726a989a 3915 modify_dest = gimple_call_lhs (stmt);
81bafd36
ILT
3916
3917 /* The function which we are inlining might not return a value,
3918 in which case we should issue a warning that the function
3919 does not return a value. In that case the optimizers will
3920 see that the variable to which the value is assigned was not
3921 initialized. We do not want to issue a warning about that
3922 uninitialized variable. */
3923 if (DECL_P (modify_dest))
3924 TREE_NO_WARNING (modify_dest) = 1;
726a989a
RB
3925
3926 if (gimple_call_return_slot_opt_p (stmt))
fa47911c 3927 {
110cfe1c 3928 return_slot = modify_dest;
fa47911c
JM
3929 modify_dest = NULL;
3930 }
81bafd36 3931 }
7740f00d
RH
3932 else
3933 modify_dest = NULL;
3934
1ea193c2
ILT
3935 /* If we are inlining a call to the C++ operator new, we don't want
3936 to use type based alias analysis on the return value. Otherwise
3937 we may get confused if the compiler sees that the inlined new
3938 function returns a pointer which was just deleted. See bug
3939 33407. */
3940 if (DECL_IS_OPERATOR_NEW (fn))
3941 {
3942 return_slot = NULL;
3943 modify_dest = NULL;
3944 }
3945
d4e4baa9 3946 /* Declare the return variable for the function. */
6938f93f 3947 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
1ea193c2 3948
acb8f212 3949 /* Add local vars in this inlined callee to caller. */
ae0379fc 3950 add_local_variables (id->src_cfun, cfun, id);
acb8f212 3951
0d63a740
JH
3952 if (dump_file && (dump_flags & TDF_DETAILS))
3953 {
3954 fprintf (dump_file, "Inlining ");
b8698a0f 3955 print_generic_expr (dump_file, id->src_fn, 0);
0d63a740 3956 fprintf (dump_file, " to ");
b8698a0f 3957 print_generic_expr (dump_file, id->dst_fn, 0);
0d63a740
JH
3958 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3959 }
3960
eb50f5f4
JH
3961 /* This is it. Duplicate the callee body. Assume callee is
3962 pre-gimplified. Note that we must not alter the caller
3963 function in any way before this point, as this CALL_EXPR may be
3964 a self-referential call; if we're calling ourselves, we need to
3965 duplicate our body before altering anything. */
0d63a740
JH
3966 copy_body (id, bb->count,
3967 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
91382288 3968 bb, return_block, NULL, NULL);
eb50f5f4 3969
d086d311 3970 /* Reset the escaped solution. */
6b8ed145 3971 if (cfun->gimple_df)
d086d311 3972 pt_solution_reset (&cfun->gimple_df->escaped);
6b8ed145 3973
d4e4baa9 3974 /* Clean up. */
b5b8b0ac
AO
3975 if (id->debug_map)
3976 {
3977 pointer_map_destroy (id->debug_map);
3978 id->debug_map = dst;
3979 }
6be42dd4 3980 pointer_map_destroy (id->decl_map);
d4e4baa9
AO
3981 id->decl_map = st;
3982
5006671f
RG
3983 /* Unlink the calls virtual operands before replacing it. */
3984 unlink_stmt_vdef (stmt);
3985
84936f6f 3986 /* If the inlined function returns a result that we care about,
726a989a
RB
3987 substitute the GIMPLE_CALL with an assignment of the return
3988 variable to the LHS of the call. That is, if STMT was
3989 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3990 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 3991 {
726a989a
RB
3992 gimple old_stmt = stmt;
3993 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3994 gsi_replace (&stmt_gsi, stmt, false);
726a989a 3995 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e21aff8a 3996 }
6de9cd9a 3997 else
110cfe1c 3998 {
726a989a
RB
3999 /* Handle the case of inlining a function with no return
4000 statement, which causes the return value to become undefined. */
4001 if (gimple_call_lhs (stmt)
4002 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 4003 {
726a989a
RB
4004 tree name = gimple_call_lhs (stmt);
4005 tree var = SSA_NAME_VAR (name);
32244553 4006 tree def = ssa_default_def (cfun, var);
110cfe1c 4007
110cfe1c
JH
4008 if (def)
4009 {
726a989a
RB
4010 /* If the variable is used undefined, make this name
4011 undefined via a move. */
4012 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4013 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c 4014 }
110cfe1c
JH
4015 else
4016 {
726a989a
RB
4017 /* Otherwise make this variable undefined. */
4018 gsi_remove (&stmt_gsi, true);
32244553 4019 set_ssa_default_def (cfun, var, name);
726a989a 4020 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
110cfe1c
JH
4021 }
4022 }
4023 else
726a989a 4024 gsi_remove (&stmt_gsi, true);
110cfe1c 4025 }
d4e4baa9 4026
4f6c2131 4027 if (purge_dead_abnormal_edges)
30fd5881
EB
4028 {
4029 gimple_purge_dead_eh_edges (return_block);
4030 gimple_purge_dead_abnormal_call_edges (return_block);
4031 }
84936f6f 4032
e21aff8a
SB
4033 /* If the value of the new expression is ignored, that's OK. We
4034 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4035 the equivalent inlined version either. */
726a989a
RB
4036 if (is_gimple_assign (stmt))
4037 {
4038 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 4039 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
726a989a
RB
4040 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4041 }
84936f6f 4042
1eb3331e
DB
4043 /* Output the inlining info for this abstract function, since it has been
4044 inlined. If we don't do this now, we can lose the information about the
4045 variables in the function when the blocks get blown away as soon as we
4046 remove the cgraph node. */
960bfb69 4047 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
84936f6f 4048
e72fcfe8 4049 /* Update callgraph if needed. */
e21aff8a 4050 cgraph_remove_node (cg_edge->callee);
e72fcfe8 4051
e21aff8a 4052 id->block = NULL_TREE;
e21aff8a 4053 successfully_inlined = TRUE;
742a37d5 4054
6de9cd9a
DN
4055 egress:
4056 input_location = saved_location;
e21aff8a 4057 return successfully_inlined;
d4e4baa9 4058}
6de9cd9a 4059
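/* Source-level sketch (illustrative, not part of GCC; local names
   invented) of the net effect of expand_call_inline on a caller of a
   trivial function.  */

static int
add1 (int x)
{
  return x + 1;
}

int
before (int b)
{
  return add1 (b);	/* the GIMPLE_CALL to be expanded */
}

int
after (int b)
{
  int x_1 = b;		/* initialize_inlined_parameters */
  int retvar = x_1 + 1;	/* copied body; RETURN became an assignment */
  return retvar;	/* the call replaced by a use of retvar */
}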
e21aff8a
SB
4060/* Expand calls to inline functions in basic block BB;
4061 return true if one was expanded. In GIMPLE a call is always
0a35513e 4062 a statement of its own, possibly storing to an LHS. */
e21aff8a
SB
4063
4064static bool
1b369fae 4065gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 4066{
726a989a 4067 gimple_stmt_iterator gsi;
6de9cd9a 4068
726a989a 4069 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4070 {
726a989a 4071 gimple stmt = gsi_stmt (gsi);
e21aff8a 4072
726a989a
RB
4073 if (is_gimple_call (stmt)
4074 && expand_call_inline (bb, stmt, id))
4075 return true;
6de9cd9a 4076 }
726a989a 4077
e21aff8a 4078 return false;
6de9cd9a
DN
4079}
4080
726a989a 4081
b8a00a4d
JH
4082/* Walk all basic blocks created after FIRST and try to fold every statement
4083 in the STATEMENTS pointer set. */
726a989a 4084
b8a00a4d
JH
4085static void
4086fold_marked_statements (int first, struct pointer_set_t *statements)
4087{
726a989a 4088 for (; first < n_basic_blocks; first++)
b8a00a4d
JH
4089 if (BASIC_BLOCK (first))
4090 {
726a989a
RB
4091 gimple_stmt_iterator gsi;
4092
4093 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4094 !gsi_end_p (gsi);
4095 gsi_next (&gsi))
4096 if (pointer_set_contains (statements, gsi_stmt (gsi)))
9477eb38 4097 {
726a989a 4098 gimple old_stmt = gsi_stmt (gsi);
4b685e14 4099 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
2bafad93 4100
44e10129
MM
4101 if (old_decl && DECL_BUILT_IN (old_decl))
4102 {
4103 /* Folding builtins can create multiple instructions,
4104 we need to look at all of them. */
4105 gimple_stmt_iterator i2 = gsi;
4106 gsi_prev (&i2);
4107 if (fold_stmt (&gsi))
4108 {
4109 gimple new_stmt;
a9d24544
JJ
4110 /* If a builtin at the end of a bb folded into nothing,
4111 the following loop won't work. */
4112 if (gsi_end_p (gsi))
4113 {
4114 cgraph_update_edges_for_call_stmt (old_stmt,
4115 old_decl, NULL);
4116 break;
4117 }
44e10129
MM
4118 if (gsi_end_p (i2))
4119 i2 = gsi_start_bb (BASIC_BLOCK (first));
4120 else
4121 gsi_next (&i2);
4122 while (1)
4123 {
4124 new_stmt = gsi_stmt (i2);
4125 update_stmt (new_stmt);
4126 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4127 new_stmt);
4128
4129 if (new_stmt == gsi_stmt (gsi))
4130 {
4131 /* It is okay to check only the very last of
4132 these statements. If it is a throwing
4133 statement, nothing will change. If it isn't,
4134 this can remove EH edges. The problematic
4135 case would be an intermediate statement that
4136 throws while the last one does not; handling
4137 that would require splitting the block, which
4138 we cannot do here, and we would lose anyway.
4139 And since builtins probably never throw,
4140 this is all moot anyway. */
4141 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4142 new_stmt))
4143 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4144 break;
4145 }
4146 gsi_next (&i2);
4147 }
4148 }
4149 }
4150 else if (fold_stmt (&gsi))
9477eb38 4151 {
726a989a
RB
4152 /* Re-read the statement from GSI as fold_stmt() may
4153 have changed it. */
4154 gimple new_stmt = gsi_stmt (gsi);
4155 update_stmt (new_stmt);
4156
4b685e14
JH
4157 if (is_gimple_call (old_stmt)
4158 || is_gimple_call (new_stmt))
44e10129
MM
4159 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4160 new_stmt);
726a989a
RB
4161
4162 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4163 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
9477eb38
JH
4164 }
4165 }
b8a00a4d
JH
4166 }
4167}
4168
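/* Illustrative example (not part of GCC; names invented) of why the
   statements produced by inlining are queued and folded afterwards:
   once the wrapper below is inlined, the builtin sees a constant
   argument and fold_stmt can collapse the whole call.  */

static inline unsigned long
strlen_of (const char *s)
{
  return __builtin_strlen (s);
}

unsigned long
four (void)
{
  return strlen_of ("abcd");	/* folds to "return 4" after inlining */
}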
1084e689
JH
4169/* Return true if BB has at least one abnormal outgoing edge. */
4170
4171static inline bool
4172has_abnormal_outgoing_edge_p (basic_block bb)
4173{
4174 edge e;
4175 edge_iterator ei;
4176
4177 FOR_EACH_EDGE (e, ei, bb->succs)
4178 if (e->flags & EDGE_ABNORMAL)
4179 return true;
4180
4181 return false;
4182}
4183
d4e4baa9
AO
4184/* Expand calls to inline functions in the body of FN. */
4185
873aa8f5 4186unsigned int
46c5ad27 4187optimize_inline_calls (tree fn)
d4e4baa9 4188{
1b369fae 4189 copy_body_data id;
e21aff8a 4190 basic_block bb;
b8a00a4d 4191 int last = n_basic_blocks;
d406b663 4192 struct gimplify_ctx gctx;
5d7b099c 4193 bool inlined_p = false;
d406b663 4194
d4e4baa9
AO
4195 /* Clear out ID. */
4196 memset (&id, 0, sizeof (id));
4197
581985d7 4198 id.src_node = id.dst_node = cgraph_get_node (fn);
322dd859 4199 gcc_assert (id.dst_node->analyzed);
1b369fae 4200 id.dst_fn = fn;
d4e4baa9 4201 /* Or any functions that aren't finished yet. */
d4e4baa9 4202 if (current_function_decl)
0f900dfa 4203 id.dst_fn = current_function_decl;
1b369fae
RH
4204
4205 id.copy_decl = copy_decl_maybe_to_var;
4206 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4207 id.transform_new_cfg = false;
4208 id.transform_return_to_modify = true;
9ff420f1 4209 id.transform_lang_insert_block = NULL;
b8a00a4d 4210 id.statements_to_fold = pointer_set_create ();
1b369fae 4211
d406b663 4212 push_gimplify_context (&gctx);
d4e4baa9 4213
672987e8
ZD
4214 /* We make no attempts to keep dominance info up-to-date. */
4215 free_dominance_info (CDI_DOMINATORS);
4216 free_dominance_info (CDI_POST_DOMINATORS);
4217
726a989a
RB
4218 /* Register specific gimple functions. */
4219 gimple_register_cfg_hooks ();
4220
e21aff8a
SB
4221 /* Reach the trees by walking over the CFG, and note the
4222 enclosing basic-blocks in the call edges. */
4223 /* We walk the blocks going forward, because inlined function bodies
4224 will split id->current_basic_block, and the new blocks will
4225 follow it; we'll trudge through them, processing their CALL_EXPRs
4226 along the way. */
4227 FOR_EACH_BB (bb)
5d7b099c 4228 inlined_p |= gimple_expand_calls_inline (bb, &id);
d4e4baa9 4229
e21aff8a 4230 pop_gimplify_context (NULL);
6de9cd9a 4231
18c6ada9
JH
4232#ifdef ENABLE_CHECKING
4233 {
4234 struct cgraph_edge *e;
4235
1b369fae 4236 verify_cgraph_node (id.dst_node);
18c6ada9
JH
4237
4238 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 4239 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 4240 gcc_assert (e->inline_failed);
18c6ada9
JH
4241 }
4242#endif
b8698a0f 4243
5d7b099c 4244 /* Fold queued statements. */
a9eafe81
AP
4245 fold_marked_statements (last, id.statements_to_fold);
4246 pointer_set_destroy (id.statements_to_fold);
b8698a0f 4247
b5b8b0ac
AO
4248 gcc_assert (!id.debug_stmts);
4249
5d7b099c
RG
4250 /* If we didn't inline into the function there is nothing to do. */
4251 if (!inlined_p)
4252 return 0;
4253
a9eafe81
AP
4254 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4255 number_blocks (fn);
b8a00a4d 4256
078c3644
JH
4257 delete_unreachable_blocks_update_callgraph (&id);
4258#ifdef ENABLE_CHECKING
4259 verify_cgraph_node (id.dst_node);
4260#endif
726a989a 4261
110cfe1c
JH
4262 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4263 not possible yet - the IPA passes might make various functions to not
4264 throw and they don't care to proactively update local EH info. This is
4265 done later in fixup_cfg pass that also execute the verification. */
726a989a
RB
4266 return (TODO_update_ssa
4267 | TODO_cleanup_cfg
45a80bb9 4268 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5d7b099c 4269 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
45a80bb9 4270 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
d4e4baa9
AO
4271}
4272
d4e4baa9
AO
4273/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4274
4275tree
46c5ad27 4276copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
d4e4baa9
AO
4277{
4278 enum tree_code code = TREE_CODE (*tp);
07beea0d 4279 enum tree_code_class cl = TREE_CODE_CLASS (code);
d4e4baa9
AO
4280
4281 /* We make copies of most nodes. */
07beea0d 4282 if (IS_EXPR_CODE_CLASS (cl)
d4e4baa9
AO
4283 || code == TREE_LIST
4284 || code == TREE_VEC
8843c120
DN
4285 || code == TYPE_DECL
4286 || code == OMP_CLAUSE)
d4e4baa9
AO
4287 {
4288 /* Because the chain gets clobbered when we make a copy, we save it
4289 here. */
82d6e6fc 4290 tree chain = NULL_TREE, new_tree;
07beea0d 4291
81f653d6
NF
4292 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4293 chain = TREE_CHAIN (*tp);
d4e4baa9
AO
4294
4295 /* Copy the node. */
82d6e6fc 4296 new_tree = copy_node (*tp);
6de9cd9a
DN
4297
4298 /* Propagate mudflap marked-ness. */
4299 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 4300 mf_mark (new_tree);
6de9cd9a 4301
82d6e6fc 4302 *tp = new_tree;
d4e4baa9
AO
4303
4304 /* Now, restore the chain, if appropriate. That will cause
4305 walk_tree to walk into the chain as well. */
50674e96
DN
4306 if (code == PARM_DECL
4307 || code == TREE_LIST
aaf46ef9 4308 || code == OMP_CLAUSE)
d4e4baa9
AO
4309 TREE_CHAIN (*tp) = chain;
4310
4311 /* For now, we don't update BLOCKs when we make copies. So, we
6de9cd9a
DN
4312 have to nullify all BIND_EXPRs. */
4313 if (TREE_CODE (*tp) == BIND_EXPR)
4314 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 4315 }
4038c495
GB
4316 else if (code == CONSTRUCTOR)
4317 {
4318 /* CONSTRUCTOR nodes need special handling because
4319 we need to duplicate the vector of elements. */
82d6e6fc 4320 tree new_tree;
4038c495 4321
82d6e6fc 4322 new_tree = copy_node (*tp);
4038c495
GB
4323
4324 /* Propagate mudflap marked-ness. */
4325 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 4326 mf_mark (new_tree);
9f63daea 4327
82d6e6fc 4328 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4038c495 4329 CONSTRUCTOR_ELTS (*tp));
82d6e6fc 4330 *tp = new_tree;
4038c495 4331 }
3533b943 4332 else if (code == STATEMENT_LIST)
deb5046b
JM
4333 /* We used to just abort on STATEMENT_LIST, but we can run into them
4334 with statement-expressions (c++/40975). */
4335 copy_statement_list (tp);
6615c446 4336 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 4337 *walk_subtrees = 0;
6615c446 4338 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 4339 *walk_subtrees = 0;
a396f8ae
GK
4340 else if (TREE_CODE_CLASS (code) == tcc_constant)
4341 *walk_subtrees = 0;
d4e4baa9
AO
4342 return NULL_TREE;
4343}
4344
4345/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
aa4a53af 4346 information indicating to what new SAVE_EXPR this one should be mapped,
e21aff8a
SB
4347 use that one. Otherwise, create a new node and enter it in ST,
4348 so that later copies of the same SAVE_EXPR reuse it. */
d4e4baa9 4349
892c7e1e 4350static void
82c82743 4351remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
d4e4baa9 4352{
6be42dd4
RG
4353 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4354 tree *n;
5e20bdd7 4355 tree t;
d4e4baa9
AO
4356
4357 /* See if we already encountered this SAVE_EXPR. */
6be42dd4 4358 n = (tree *) pointer_map_contains (st, *tp);
d92b4486 4359
d4e4baa9
AO
4360 /* If we didn't already remap this SAVE_EXPR, do so now. */
4361 if (!n)
4362 {
5e20bdd7 4363 t = copy_node (*tp);
d4e4baa9 4364
d4e4baa9 4365 /* Remember this SAVE_EXPR. */
6be42dd4 4366 *pointer_map_insert (st, *tp) = t;
350ebd54 4367 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
6be42dd4 4368 *pointer_map_insert (st, t) = t;
d4e4baa9
AO
4369 }
4370 else
5e20bdd7
JZ
4371 {
4372 /* We've already walked into this SAVE_EXPR; don't do it again. */
4373 *walk_subtrees = 0;
6be42dd4 4374 t = *n;
5e20bdd7 4375 }
d4e4baa9
AO
4376
4377 /* Replace this SAVE_EXPR with the copy. */
5e20bdd7 4378 *tp = t;
d4e4baa9 4379}
d436bff8 4380
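/* A minimal sketch (generic C, not GCC code) of the memoization
   pattern above: the first visit copies the node and records both
   old->new and new->new, so revisiting either pointer yields the
   existing copy instead of copying again.  */

#include <stddef.h>

struct map_entry { const void *key; void *val; };

static void *
remap_once (struct map_entry *st, size_t *n, const void *node,
	    void *(*copy_fn) (const void *))
{
  size_t i;
  void *t;

  for (i = 0; i < *n; i++)
    if (st[i].key == node)
      return st[i].val;			/* already remapped: reuse it */

  t = copy_fn (node);
  st[(*n)++] = (struct map_entry) { node, t };	/* old -> new */
  st[(*n)++] = (struct map_entry) { t, t };	/* new -> new */
  return t;
}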
aa4a53af
RK
4381/* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4382 copies the declaration and enters it in the decl map in DATA (which is
4383 really a `copy_body_data *'). */
6de9cd9a
DN
4384
4385static tree
4386mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4387 void *data)
4388{
1b369fae 4389 copy_body_data *id = (copy_body_data *) data;
6de9cd9a
DN
4390
4391 /* Don't walk into types. */
350fae66
RK
4392 if (TYPE_P (*tp))
4393 *walk_subtrees = 0;
6de9cd9a 4394
350fae66 4395 else if (TREE_CODE (*tp) == LABEL_EXPR)
6de9cd9a 4396 {
350fae66 4397 tree decl = TREE_OPERAND (*tp, 0);
6de9cd9a 4398
350fae66 4399 /* Copy the decl and remember the copy. */
1b369fae 4400 insert_decl_map (id, decl, id->copy_decl (decl, id));
6de9cd9a
DN
4401 }
4402
4403 return NULL_TREE;
4404}
4405
19114537
EC
4406/* Perform any modifications to EXPR required when it is unsaved. Does
4407 not recurse into EXPR's subtrees. */
4408
4409static void
4410unsave_expr_1 (tree expr)
4411{
4412 switch (TREE_CODE (expr))
4413 {
4414 case TARGET_EXPR:
4415 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4416 It's OK for this to happen if it was part of a subtree that
4417 isn't immediately expanded, such as operand 2 of another
4418 TARGET_EXPR. */
4419 if (TREE_OPERAND (expr, 1))
4420 break;
4421
4422 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4423 TREE_OPERAND (expr, 3) = NULL_TREE;
4424 break;
4425
4426 default:
4427 break;
4428 }
4429}
4430
6de9cd9a
DN
4431/* Called via walk_tree when an expression is unsaved. Using the
4432 decl map stored in the `copy_body_data *' passed in DATA,
4433 remaps all local declarations to appropriate replacements. */
d436bff8
AH
4434
4435static tree
6de9cd9a 4436unsave_r (tree *tp, int *walk_subtrees, void *data)
d436bff8 4437{
1b369fae 4438 copy_body_data *id = (copy_body_data *) data;
6be42dd4
RG
4439 struct pointer_map_t *st = id->decl_map;
4440 tree *n;
6de9cd9a
DN
4441
4442 /* Only a local declaration (variable or label). */
4443 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4444 || TREE_CODE (*tp) == LABEL_DECL)
4445 {
4446 /* Lookup the declaration. */
6be42dd4 4447 n = (tree *) pointer_map_contains (st, *tp);
9f63daea 4448
6de9cd9a
DN
4449 /* If it's there, remap it. */
4450 if (n)
6be42dd4 4451 *tp = *n;
6de9cd9a 4452 }
aa4a53af 4453
6de9cd9a 4454 else if (TREE_CODE (*tp) == STATEMENT_LIST)
726a989a 4455 gcc_unreachable ();
6de9cd9a
DN
4456 else if (TREE_CODE (*tp) == BIND_EXPR)
4457 copy_bind_expr (tp, walk_subtrees, id);
a406865a
RG
4458 else if (TREE_CODE (*tp) == SAVE_EXPR
4459 || TREE_CODE (*tp) == TARGET_EXPR)
82c82743 4460 remap_save_expr (tp, st, walk_subtrees);
d436bff8 4461 else
6de9cd9a
DN
4462 {
4463 copy_tree_r (tp, walk_subtrees, NULL);
4464
4465 /* Do whatever unsaving is required. */
4466 unsave_expr_1 (*tp);
4467 }
4468
4469 /* Keep iterating. */
4470 return NULL_TREE;
d436bff8
AH
4471}
4472
19114537
EC
4473/* Copies everything in EXPR and replaces variables, labels
4474 and SAVE_EXPRs local to EXPR. */
6de9cd9a
DN
4475
4476tree
19114537 4477unsave_expr_now (tree expr)
6de9cd9a 4478{
1b369fae 4479 copy_body_data id;
6de9cd9a
DN
4480
4481 /* There's nothing to do for NULL_TREE. */
4482 if (expr == 0)
4483 return expr;
4484
4485 /* Set up ID. */
4486 memset (&id, 0, sizeof (id));
1b369fae
RH
4487 id.src_fn = current_function_decl;
4488 id.dst_fn = current_function_decl;
6be42dd4 4489 id.decl_map = pointer_map_create ();
b5b8b0ac 4490 id.debug_map = NULL;
6de9cd9a 4491
1b369fae
RH
4492 id.copy_decl = copy_decl_no_change;
4493 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4494 id.transform_new_cfg = false;
4495 id.transform_return_to_modify = false;
9ff420f1 4496 id.transform_lang_insert_block = NULL;
1b369fae 4497
6de9cd9a
DN
4498 /* Walk the tree once to find local labels. */
4499 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4500
4501 /* Walk the tree again, copying, remapping, and unsaving. */
4502 walk_tree (&expr, unsave_r, &id, NULL);
4503
4504 /* Clean up. */
6be42dd4 4505 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
4506 if (id.debug_map)
4507 pointer_map_destroy (id.debug_map);
6de9cd9a
DN
4508
4509 return expr;
4510}
4511
726a989a
RB
4512/* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4513 label, copies the declaration and enters it in the splay_tree in DATA (which
4514 is really a 'copy_body_data *'). */
4515
4516static tree
4517mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4518 bool *handled_ops_p ATTRIBUTE_UNUSED,
4519 struct walk_stmt_info *wi)
4520{
4521 copy_body_data *id = (copy_body_data *) wi->info;
4522 gimple stmt = gsi_stmt (*gsip);
4523
4524 if (gimple_code (stmt) == GIMPLE_LABEL)
4525 {
4526 tree decl = gimple_label_label (stmt);
4527
4528 /* Copy the decl and remember the copy. */
4529 insert_decl_map (id, decl, id->copy_decl (decl, id));
4530 }
4531
4532 return NULL_TREE;
4533}
4534
4535
4536/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4537 Using the decl map stored in the `copy_body_data *' in WI->info,
4538 remaps all local declarations to appropriate replacements in gimple
4539 operands. */
4540
4541static tree
4542replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4543{
4544 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4545 copy_body_data *id = (copy_body_data *) wi->info;
4546 struct pointer_map_t *st = id->decl_map;
4547 tree *n;
4548 tree expr = *tp;
4549
4550 /* Only a local declaration (variable or label). */
4551 if ((TREE_CODE (expr) == VAR_DECL
4552 && !TREE_STATIC (expr))
4553 || TREE_CODE (expr) == LABEL_DECL)
4554 {
4555 /* Lookup the declaration. */
4556 n = (tree *) pointer_map_contains (st, expr);
4557
4558 /* If it's there, remap it. */
4559 if (n)
4560 *tp = *n;
4561 *walk_subtrees = 0;
4562 }
4563 else if (TREE_CODE (expr) == STATEMENT_LIST
4564 || TREE_CODE (expr) == BIND_EXPR
4565 || TREE_CODE (expr) == SAVE_EXPR)
4566 gcc_unreachable ();
4567 else if (TREE_CODE (expr) == TARGET_EXPR)
4568 {
4569 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4570 It's OK for this to happen if it was part of a subtree that
4571 isn't immediately expanded, such as operand 2 of another
4572 TARGET_EXPR. */
4573 if (!TREE_OPERAND (expr, 1))
4574 {
4575 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4576 TREE_OPERAND (expr, 3) = NULL_TREE;
4577 }
4578 }
4579
4580 /* Keep iterating. */
4581 return NULL_TREE;
4582}
4583
4584
4585/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4586 Using the decl map stored in the `copy_body_data *' in WI->info,
4587 remaps all local declarations to appropriate replacements in gimple
4588 statements. */
4589
4590static tree
4591replace_locals_stmt (gimple_stmt_iterator *gsip,
4592 bool *handled_ops_p ATTRIBUTE_UNUSED,
4593 struct walk_stmt_info *wi)
4594{
4595 copy_body_data *id = (copy_body_data *) wi->info;
4596 gimple stmt = gsi_stmt (*gsip);
4597
4598 if (gimple_code (stmt) == GIMPLE_BIND)
4599 {
4600 tree block = gimple_bind_block (stmt);
4601
4602 if (block)
4603 {
4604 remap_block (&block, id);
4605 gimple_bind_set_block (stmt, block);
4606 }
4607
4608 /* This will remap a lot of the same decls again, but this should be
4609 harmless. */
4610 if (gimple_bind_vars (stmt))
526d73ab 4611 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
726a989a
RB
4612 }
4613
4614 /* Keep iterating. */
4615 return NULL_TREE;
4616}
4617
4618
4619/* Copies everything in SEQ and replaces variables and labels local to
4620 current_function_decl. */
4621
4622gimple_seq
4623copy_gimple_seq_and_replace_locals (gimple_seq seq)
4624{
4625 copy_body_data id;
4626 struct walk_stmt_info wi;
4627 struct pointer_set_t *visited;
4628 gimple_seq copy;
4629
4630 /* There's nothing to do for NULL_TREE. */
4631 if (seq == NULL)
4632 return seq;
4633
4634 /* Set up ID. */
4635 memset (&id, 0, sizeof (id));
4636 id.src_fn = current_function_decl;
4637 id.dst_fn = current_function_decl;
4638 id.decl_map = pointer_map_create ();
b5b8b0ac 4639 id.debug_map = NULL;
726a989a
RB
4640
4641 id.copy_decl = copy_decl_no_change;
4642 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4643 id.transform_new_cfg = false;
4644 id.transform_return_to_modify = false;
4645 id.transform_lang_insert_block = NULL;
4646
4647 /* Walk the tree once to find local labels. */
4648 memset (&wi, 0, sizeof (wi));
4649 visited = pointer_set_create ();
4650 wi.info = &id;
4651 wi.pset = visited;
4652 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4653 pointer_set_destroy (visited);
4654
4655 copy = gimple_seq_copy (seq);
4656
4657 /* Walk the copy, remapping decls. */
4658 memset (&wi, 0, sizeof (wi));
4659 wi.info = &id;
4660 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4661
4662 /* Clean up. */
4663 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
4664 if (id.debug_map)
4665 pointer_map_destroy (id.debug_map);
726a989a
RB
4666
4667 return copy;
4668}
4669
4670
6de9cd9a 4671/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
aa4a53af 4672
6de9cd9a
DN
4673static tree
4674debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4675{
4676 if (*tp == data)
4677 return (tree) data;
4678 else
4679 return NULL;
4680}
4681
24e47c76 4682DEBUG_FUNCTION bool
6de9cd9a
DN
4683debug_find_tree (tree top, tree search)
4684{
4685 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4686}
4687
e21aff8a 4688
6de9cd9a
DN
4689/* Declare the variables created by the inliner. Add all the variables in
4690 VARS to BIND_EXPR. */
4691
4692static void
e21aff8a 4693declare_inline_vars (tree block, tree vars)
6de9cd9a 4694{
84936f6f 4695 tree t;
910ad8de 4696 for (t = vars; t; t = DECL_CHAIN (t))
9659ce8b
JH
4697 {
4698 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4699 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
c021f10b 4700 add_local_decl (cfun, t);
9659ce8b 4701 }
6de9cd9a 4702
e21aff8a
SB
4703 if (block)
4704 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4705}
4706
19734dd8 4707/* Finish up COPY, a copy of DECL. The DECL originally lived in ID->src_fn
1b369fae
RH
 4708 but the copy will live in ID->dst_fn; fix up its context, debug-related
 4709 flags and RTL accordingly. */
19734dd8 4710
1b369fae
RH
4711static tree
4712copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
19734dd8 4713{
19734dd8
RL
4714 /* Don't generate debug information for the copy if we wouldn't have
4715 generated it for the original either. */
4716 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4717 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4718
4719 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
b8698a0f 4720 declaration inspired this copy. */
19734dd8
RL
4721 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4722
4723 /* The new variable/label has no RTL, yet. */
68a976f2
RL
4724 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4725 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2eb79bbb 4726 SET_DECL_RTL (copy, 0);
b8698a0f 4727
19734dd8
RL
4728 /* These args would always appear unused, if not for this. */
4729 TREE_USED (copy) = 1;
4730
4731 /* Set the context for the new declaration. */
4732 if (!DECL_CONTEXT (decl))
4733 /* Globals stay global. */
4734 ;
1b369fae 4735 else if (DECL_CONTEXT (decl) != id->src_fn)
19734dd8
RL
4736 /* Things that weren't in the scope of the function we're inlining
4737 from aren't in the scope we're inlining to, either. */
4738 ;
4739 else if (TREE_STATIC (decl))
4740 /* Function-scoped static variables should stay in the original
4741 function. */
4742 ;
4743 else
4744 /* Ordinary automatic local variables are now in the scope of the
4745 new function. */
1b369fae 4746 DECL_CONTEXT (copy) = id->dst_fn;
19734dd8
RL
4747
4748 return copy;
4749}
4750
1b369fae
RH
4751static tree
4752copy_decl_to_var (tree decl, copy_body_data *id)
4753{
4754 tree copy, type;
4755
4756 gcc_assert (TREE_CODE (decl) == PARM_DECL
4757 || TREE_CODE (decl) == RESULT_DECL);
4758
4759 type = TREE_TYPE (decl);
4760
c2255bc4
AH
4761 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4762 VAR_DECL, DECL_NAME (decl), type);
25a6a873
RG
4763 if (DECL_PT_UID_SET_P (decl))
4764 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
1b369fae
RH
4765 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4766 TREE_READONLY (copy) = TREE_READONLY (decl);
4767 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
0890b981 4768 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
1b369fae
RH
4769
4770 return copy_decl_for_dup_finish (id, decl, copy);
4771}
4772
c08cd4c1
JM
4773/* Like copy_decl_to_var, but create a return slot object instead of a
4774 pointer variable for return by invisible reference. */
4775
4776static tree
4777copy_result_decl_to_var (tree decl, copy_body_data *id)
4778{
4779 tree copy, type;
4780
4781 gcc_assert (TREE_CODE (decl) == PARM_DECL
4782 || TREE_CODE (decl) == RESULT_DECL);
4783
4784 type = TREE_TYPE (decl);
4785 if (DECL_BY_REFERENCE (decl))
4786 type = TREE_TYPE (type);
4787
c2255bc4
AH
4788 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4789 VAR_DECL, DECL_NAME (decl), type);
25a6a873
RG
4790 if (DECL_PT_UID_SET_P (decl))
4791 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
c08cd4c1
JM
4792 TREE_READONLY (copy) = TREE_READONLY (decl);
4793 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4794 if (!DECL_BY_REFERENCE (decl))
4795 {
4796 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
0890b981 4797 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
c08cd4c1
JM
4798 }
4799
4800 return copy_decl_for_dup_finish (id, decl, copy);
4801}
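/* Added remark: the only difference from copy_decl_to_var above is the
   DECL_BY_REFERENCE case, where the copy is given TREE_TYPE (TREE_TYPE
   (DECL)) -- an object of the actual return type -- so it can act as the
   return slot itself rather than as a pointer to it.  */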
4802
9ff420f1 4803tree
1b369fae
RH
4804copy_decl_no_change (tree decl, copy_body_data *id)
4805{
4806 tree copy;
4807
4808 copy = copy_node (decl);
4809
4810 /* The COPY is not abstract; it will be generated in DST_FN. */
4811 DECL_ABSTRACT (copy) = 0;
4812 lang_hooks.dup_lang_specific_decl (copy);
4813
4814 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4815 been taken; it's for internal bookkeeping in expand_goto_internal. */
4816 if (TREE_CODE (copy) == LABEL_DECL)
4817 {
4818 TREE_ADDRESSABLE (copy) = 0;
4819 LABEL_DECL_UID (copy) = -1;
4820 }
4821
4822 return copy_decl_for_dup_finish (id, decl, copy);
4823}
4824
4825static tree
4826copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4827{
4828 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4829 return copy_decl_to_var (decl, id);
4830 else
4831 return copy_decl_no_change (decl, id);
4832}
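/* A minimal sketch (hypothetical helper): the copy_decl_* routines above
   are not called directly; they are installed as the ID->copy_decl
   callback, which remap_decl invokes for each decl it meets for the
   first time.  */

static void
sketch_pick_copy_policy (copy_body_data *id, bool params_become_vars)
{
  id->copy_decl = params_become_vars ? copy_decl_maybe_to_var
				     : copy_decl_no_change;
}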
4833
19734dd8
RL
4834/* Return a copy of the function's argument list, leaving out arguments whose index is set in ARGS_TO_SKIP; each skipped PARM_DECL is remapped to a fresh VAR_DECL chained onto *VARS. */
4835static tree
c6f7cfc1
JH
4836copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4837 bitmap args_to_skip, tree *vars)
19734dd8 4838{
c6f7cfc1
JH
4839 tree arg, *parg;
4840 tree new_parm = NULL;
4841 int i = 0;
19734dd8 4842
c6f7cfc1
JH
4843 parg = &new_parm;
4844
910ad8de 4845 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
c6f7cfc1
JH
4846 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4847 {
4848 tree new_tree = remap_decl (arg, id);
d7da5cc8
MJ
4849 if (TREE_CODE (new_tree) != PARM_DECL)
4850 new_tree = id->copy_decl (arg, id);
c6f7cfc1
JH
4851 lang_hooks.dup_lang_specific_decl (new_tree);
4852 *parg = new_tree;
910ad8de 4853 parg = &DECL_CHAIN (new_tree);
c6f7cfc1 4854 }
eb50f5f4 4855 else if (!pointer_map_contains (id->decl_map, arg))
c6f7cfc1
JH
4856 {
4857 /* Make an equivalent VAR_DECL. If the argument was used
4858 as a temporary variable later in the function, those uses will be
4859 replaced by the local variable. */
4860 tree var = copy_decl_to_var (arg, id);
c6f7cfc1
JH
4861 insert_decl_map (id, arg, var);
4862 /* Declare this new variable. */
910ad8de 4863 DECL_CHAIN (var) = *vars;
c6f7cfc1
JH
4864 *vars = var;
4865 }
4866 return new_parm;
19734dd8
RL
4867}
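/* A usage sketch (hypothetical caller, not in the original file):
   dropping the first parameter during versioning amounts to setting
   bit 0 in the ARGS_TO_SKIP bitmap.  */

static tree
sketch_copy_args_without_first (tree parms, copy_body_data *id, tree *vars)
{
  bitmap to_skip = BITMAP_ALLOC (NULL);
  tree copy;

  bitmap_set_bit (to_skip, 0);	/* Parameter indices are 0-based.  */
  copy = copy_arguments_for_versioning (parms, id, to_skip, vars);
  BITMAP_FREE (to_skip);
  return copy;
}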
4868
4869/* Return a copy of the function's static chain. */
4870static tree
1b369fae 4871copy_static_chain (tree static_chain, copy_body_data * id)
19734dd8
RL
4872{
4873 tree *chain_copy, *pvar;
4874
4875 chain_copy = &static_chain;
910ad8de 4876 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
19734dd8 4877 {
82d6e6fc
KG
4878 tree new_tree = remap_decl (*pvar, id);
4879 lang_hooks.dup_lang_specific_decl (new_tree);
910ad8de 4880 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
82d6e6fc 4881 *pvar = new_tree;
19734dd8
RL
4882 }
4883 return static_chain;
4884}
4885
4886/* Return true if the function is allowed to be versioned.
4887 This is a guard for the versioning functionality. */
27dbd3ac 4888
19734dd8
RL
4889bool
4890tree_versionable_function_p (tree fndecl)
4891{
86631ea3
MJ
4892 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4893 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
19734dd8
RL
4894}
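/* A guard sketch (hypothetical caller): passes that clone functions are
   expected to test this predicate first; both the "noclone" attribute
   and the conditions checked by copy_forbidden veto versioning.  */

static bool
sketch_try_version (struct cgraph_node *node)
{
  if (!tree_versionable_function_p (node->decl))
    return false;	/* Cloning this function is not allowed.  */
  /* ... create and populate the new version here ...  */
  return true;
}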
4895
9187e02d
JH
4896/* Delete all unreachable basic blocks and update callgraph.
4897 Doing so is somewhat nontrivial because we need to update all clones and
4898 remove inline functions that become unreachable. */
9f5e9983 4899
9187e02d
JH
4900static bool
4901delete_unreachable_blocks_update_callgraph (copy_body_data *id)
9f5e9983 4902{
9187e02d
JH
4903 bool changed = false;
4904 basic_block b, next_bb;
4905
4906 find_unreachable_blocks ();
4907
4908 /* Delete all unreachable basic blocks. */
4909
4910 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4911 {
4912 next_bb = b->next_bb;
4913
4914 if (!(b->flags & BB_REACHABLE))
4915 {
4916 gimple_stmt_iterator bsi;
4917
4918 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4919 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4920 {
4921 struct cgraph_edge *e;
4922 struct cgraph_node *node;
4923
4924 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4925 {
4926 if (!e->inline_failed)
b4e93f45 4927 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
9187e02d
JH
4928 else
4929 cgraph_remove_edge (e);
4930 }
4931 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4932 && id->dst_node->clones)
4933 for (node = id->dst_node->clones; node != id->dst_node;)
4934 {
4935 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4936 {
4937 if (!e->inline_failed)
b4e93f45 4938 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
9187e02d
JH
4939 else
4940 cgraph_remove_edge (e);
4941 }
b8698a0f 4942
9187e02d
JH
4943 if (node->clones)
4944 node = node->clones;
4945 else if (node->next_sibling_clone)
4946 node = node->next_sibling_clone;
4947 else
4948 {
4949 while (node != id->dst_node && !node->next_sibling_clone)
4950 node = node->clone_of;
4951 if (node != id->dst_node)
4952 node = node->next_sibling_clone;
4953 }
4954 }
4955 }
4956 delete_basic_block (b);
4957 changed = true;
4958 }
4959 }
4960
9187e02d 4961 return changed;
9f5e9983
JJ
4962}
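/* Added remark: the inner clone loop above walks the clone tree without an
   explicit worklist: descend into NODE->clones when possible, otherwise
   step to NODE->next_sibling_clone, and climb back through NODE->clone_of
   until a sibling exists or ID->dst_node is reached again.
   update_clone_info below uses the identical idiom.  */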
4963
08ad1d6d
JH
4964/* Update clone info after duplication. */
4965
4966static void
4967update_clone_info (copy_body_data * id)
4968{
4969 struct cgraph_node *node;
4970 if (!id->dst_node->clones)
4971 return;
4972 for (node = id->dst_node->clones; node != id->dst_node;)
4973 {
4974 /* First update replace maps to match the new body. */
4975 if (node->clone.tree_map)
4976 {
4977 unsigned int i;
4978 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4979 {
4980 struct ipa_replace_map *replace_info;
4981 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4982 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4983 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4984 }
4985 }
4986 if (node->clones)
4987 node = node->clones;
4988 else if (node->next_sibling_clone)
4989 node = node->next_sibling_clone;
4990 else
4991 {
4992 while (node != id->dst_node && !node->next_sibling_clone)
4993 node = node->clone_of;
4994 if (node != id->dst_node)
4995 node = node->next_sibling_clone;
4996 }
4997 }
4998}
4999
19734dd8
RL
5000/* Create a copy of a function's tree.
5001 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5002 of the original function and the new copied function
b8698a0f
L
5003 respectively. In case we want to replace a DECL
5004 tree with another tree while duplicating the function's
5005 body, TREE_MAP represents the mapping between these
ea99e0be 5006 trees. If UPDATE_CLONES is set, the call_stmt fields
91382288
JH
5007 of edges of clones of the function will be updated.
5008
5009 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5010 from the new version.
1a2c27e9 5011 If SKIP_RETURN is true, the new version will return void.
91382288
JH
5012 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5013 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5014*/
19734dd8 5015void
27dbd3ac
RH
5016tree_function_versioning (tree old_decl, tree new_decl,
5017 VEC(ipa_replace_map_p,gc)* tree_map,
91382288 5018 bool update_clones, bitmap args_to_skip,
1a2c27e9
EB
5019 bool skip_return, bitmap blocks_to_copy,
5020 basic_block new_entry)
19734dd8
RL
5021{
5022 struct cgraph_node *old_version_node;
5023 struct cgraph_node *new_version_node;
1b369fae 5024 copy_body_data id;
110cfe1c 5025 tree p;
19734dd8
RL
5026 unsigned i;
5027 struct ipa_replace_map *replace_info;
b5b8b0ac 5028 basic_block old_entry_block, bb;
0f1961a2
JH
5029 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5030
873aa8f5 5031 tree old_current_function_decl = current_function_decl;
0f1961a2 5032 tree vars = NULL_TREE;
19734dd8
RL
5033
5034 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5035 && TREE_CODE (new_decl) == FUNCTION_DECL);
5036 DECL_POSSIBLY_INLINED (old_decl) = 1;
5037
fe660d7b
MJ
5038 old_version_node = cgraph_get_node (old_decl);
5039 gcc_checking_assert (old_version_node);
5040 new_version_node = cgraph_get_node (new_decl);
5041 gcc_checking_assert (new_version_node);
19734dd8 5042
ddb555ed
JJ
5043 /* Copy over debug args. */
5044 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5045 {
5046 VEC(tree, gc) **new_debug_args, **old_debug_args;
5047 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5048 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5049 old_debug_args = decl_debug_args_lookup (old_decl);
5050 if (old_debug_args)
5051 {
5052 new_debug_args = decl_debug_args_insert (new_decl);
5053 *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
5054 }
5055 }
5056
a3aadcc5
JH
5057 /* Output the inlining info for this abstract function, since it has been
5058 inlined. If we don't do this now, we can lose the information about the
5059 variables in the function when the blocks get blown away as soon as we
5060 remove the cgraph node. */
5061 (*debug_hooks->outlining_inline_function) (old_decl);
5062
19734dd8
RL
5063 DECL_ARTIFICIAL (new_decl) = 1;
5064 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
f9417da1 5065 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
19734dd8 5066
3d283195
JH
5067 /* Prepare the data structures for the tree copy. */
5068 memset (&id, 0, sizeof (id));
5069
19734dd8 5070 /* Generate a new name for the new version. */
9187e02d 5071 id.statements_to_fold = pointer_set_create ();
b5b8b0ac 5072
6be42dd4 5073 id.decl_map = pointer_map_create ();
b5b8b0ac 5074 id.debug_map = NULL;
1b369fae
RH
5075 id.src_fn = old_decl;
5076 id.dst_fn = new_decl;
5077 id.src_node = old_version_node;
5078 id.dst_node = new_version_node;
5079 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
0e3776db
JH
5080 if (id.src_node->ipa_transforms_to_apply)
5081 {
5082 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5083 unsigned int i;
5084
5085 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5086 id.src_node->ipa_transforms_to_apply);
5087 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5088 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5089 VEC_index (ipa_opt_pass,
5090 old_transforms_to_apply,
5091 i));
5092 }
b8698a0f 5093
1b369fae
RH
5094 id.copy_decl = copy_decl_no_change;
5095 id.transform_call_graph_edges
5096 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5097 id.transform_new_cfg = true;
5098 id.transform_return_to_modify = false;
9ff420f1 5099 id.transform_lang_insert_block = NULL;
1b369fae 5100
19734dd8 5101 current_function_decl = new_decl;
110cfe1c
JH
5102 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5103 (DECL_STRUCT_FUNCTION (old_decl));
5104 initialize_cfun (new_decl, old_decl,
0d63a740 5105 old_entry_block->count);
1755aad0
RG
5106 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5107 = id.src_cfun->gimple_df->ipa_pta;
110cfe1c 5108 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
b8698a0f 5109
19734dd8
RL
5110 /* Copy the function's static chain. */
5111 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5112 if (p)
5113 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5114 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5115 &id);
b8698a0f 5116
19734dd8
RL
5117 /* If there's a tree_map, prepare for substitution. */
5118 if (tree_map)
9187e02d 5119 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
19734dd8 5120 {
0f1961a2 5121 gimple init;
9187e02d 5122 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
1b369fae 5123 if (replace_info->replace_p)
00fc2333 5124 {
657c0925 5125 tree op = replace_info->new_tree;
922f15c2
JH
5126 if (!replace_info->old_tree)
5127 {
5128 int i = replace_info->parm_num;
5129 tree parm;
910ad8de 5130 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
922f15c2
JH
5131 i--;
5132 replace_info->old_tree = parm;
5133 }
5134
657c0925
JH
5135
5136 STRIP_NOPS (op);
5137
5138 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5139 op = TREE_OPERAND (op, 0);
b8698a0f 5140
0f1961a2
JH
5141 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5142 init = setup_one_parameter (&id, replace_info->old_tree,
5143 replace_info->new_tree, id.src_fn,
5144 NULL,
5145 &vars);
5146 if (init)
5147 VEC_safe_push (gimple, heap, init_stmts, init);
00fc2333 5148 }
19734dd8 5149 }
eb50f5f4
JH
5150 /* Copy the function's arguments. */
5151 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5152 DECL_ARGUMENTS (new_decl) =
5153 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5154 args_to_skip, &vars);
b8698a0f 5155
eb50f5f4 5156 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
474086eb 5157 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
b8698a0f 5158
0f1961a2 5159 declare_inline_vars (DECL_INITIAL (new_decl), vars);
9187e02d 5160
c021f10b 5161 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
19734dd8 5162 /* Add local vars. */
ae0379fc 5163 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
b8698a0f 5164
90dda0e9 5165 if (DECL_RESULT (old_decl) == NULL_TREE)
1a2c27e9 5166 ;
90dda0e9 5167 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
1a2c27e9
EB
5168 {
5169 DECL_RESULT (new_decl)
5170 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5171 RESULT_DECL, NULL_TREE, void_type_node);
5172 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5173 cfun->returns_struct = 0;
5174 cfun->returns_pcc_struct = 0;
5175 }
5176 else
19734dd8 5177 {
6ff38230
RG
5178 tree old_name;
5179 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
19734dd8 5180 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6ff38230
RG
5181 if (gimple_in_ssa_p (id.src_cfun)
5182 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
32244553 5183 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6ff38230
RG
5184 {
5185 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5186 insert_decl_map (&id, old_name, new_name);
5187 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
32244553 5188 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6ff38230 5189 }
19734dd8 5190 }
b8698a0f 5191
6ff38230
RG
5192 /* Copy the Function's body. */
5193 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5194 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5195
19734dd8
RL
5196 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5197 number_blocks (new_decl);
5198
b5b8b0ac
AO
5199 /* We want to create the BB unconditionally, so that the addition of
5200 debug stmts doesn't affect BB count, which may in the end cause
5201 codegen differences. */
5202 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5203 while (VEC_length (gimple, init_stmts))
5204 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
08ad1d6d 5205 update_clone_info (&id);
0f1961a2 5206
27dbd3ac
RH
5207 /* Remap the nonlocal_goto_save_area, if any. */
5208 if (cfun->nonlocal_goto_save_area)
5209 {
5210 struct walk_stmt_info wi;
5211
5212 memset (&wi, 0, sizeof (wi));
5213 wi.info = &id;
5214 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5215 }
5216
19734dd8 5217 /* Clean up. */
6be42dd4 5218 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
5219 if (id.debug_map)
5220 pointer_map_destroy (id.debug_map);
5006671f
RG
5221 free_dominance_info (CDI_DOMINATORS);
5222 free_dominance_info (CDI_POST_DOMINATORS);
9187e02d
JH
5223
5224 fold_marked_statements (0, id.statements_to_fold);
5225 pointer_set_destroy (id.statements_to_fold);
5226 fold_cond_expr_cond ();
5227 delete_unreachable_blocks_update_callgraph (&id);
99b766fc
JH
5228 if (id.dst_node->analyzed)
5229 cgraph_rebuild_references ();
9187e02d 5230 update_ssa (TODO_update_ssa);
b35366ce
JH
5231
5232 /* After partial cloning we need to rescale frequencies, so they are
5233 within proper range in the cloned function. */
5234 if (new_entry)
5235 {
5236 struct cgraph_edge *e;
5237 rebuild_frequencies ();
5238
5239 new_version_node->count = ENTRY_BLOCK_PTR->count;
5240 for (e = new_version_node->callees; e; e = e->next_callee)
5241 {
5242 basic_block bb = gimple_bb (e->call_stmt);
02ec6988
MJ
5243 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5244 bb);
5245 e->count = bb->count;
5246 }
5247 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5248 {
5249 basic_block bb = gimple_bb (e->call_stmt);
5250 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5251 bb);
b35366ce
JH
5252 e->count = bb->count;
5253 }
5254 }
5255
9187e02d
JH
5256 free_dominance_info (CDI_DOMINATORS);
5257 free_dominance_info (CDI_POST_DOMINATORS);
5258
b5b8b0ac 5259 gcc_assert (!id.debug_stmts);
0f1961a2 5260 VEC_free (gimple, heap, init_stmts);
110cfe1c 5261 pop_cfun ();
873aa8f5
JH
5262 current_function_decl = old_current_function_decl;
5263 gcc_assert (!current_function_decl
5264 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
19734dd8
RL
5265 return;
5266}
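/* A simplified call sketch (hypothetical): produce a plain copy of
   OLD_DECL with nothing skipped or remapped.  Real callers, such as the
   cgraph cloning machinery, build NEW_DECL more carefully than copy_node
   does and register it with the callgraph first.  */

static tree
sketch_plain_version (tree old_decl)
{
  tree new_decl = copy_node (old_decl);
  tree_function_versioning (old_decl, new_decl,
			    NULL /* tree_map */,
			    false /* update_clones */,
			    NULL /* args_to_skip */,
			    false /* skip_return */,
			    NULL /* blocks_to_copy */,
			    NULL /* new_entry */);
  return new_decl;
}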
5267
f82a627c
EB
5268/* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5269 the callee and return the inlined body on success. */
5270
5271tree
5272maybe_inline_call_in_expr (tree exp)
5273{
5274 tree fn = get_callee_fndecl (exp);
5275
5276 /* We can only try to inline "const" functions. */
5277 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5278 {
5279 struct pointer_map_t *decl_map = pointer_map_create ();
5280 call_expr_arg_iterator iter;
5281 copy_body_data id;
5282 tree param, arg, t;
5283
5284 /* Remap the parameters. */
5285 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5286 param;
910ad8de 5287 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
f82a627c
EB
5288 *pointer_map_insert (decl_map, param) = arg;
5289
5290 memset (&id, 0, sizeof (id));
5291 id.src_fn = fn;
5292 id.dst_fn = current_function_decl;
5293 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5294 id.decl_map = decl_map;
5295
5296 id.copy_decl = copy_decl_no_change;
5297 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5298 id.transform_new_cfg = false;
5299 id.transform_return_to_modify = true;
267ffce3 5300 id.transform_lang_insert_block = NULL;
f82a627c
EB
5301
5302 /* Make sure not to unshare trees behind the front-end's back
5303 since front-end specific mechanisms may rely on sharing. */
5304 id.regimplify = false;
5305 id.do_not_unshare = true;
5306
5307 /* We're not inside any EH region. */
1d65f45c 5308 id.eh_lp_nr = 0;
f82a627c
EB
5309
5310 t = copy_tree_body (&id);
5311 pointer_map_destroy (decl_map);
5312
5313 /* We can only return something suitable for use in a GENERIC
5314 expression tree. */
5315 if (TREE_CODE (t) == MODIFY_EXPR)
5316 return TREE_OPERAND (t, 1);
5317 }
5318
5319 return NULL_TREE;
5320}
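/* A usage sketch (hypothetical helper): fold a call to a "const" function
   inside a GENERIC tree when possible, keeping the original CALL_EXPR
   otherwise.  */

static tree
sketch_fold_call (tree call)
{
  tree body = maybe_inline_call_in_expr (call);
  return body ? body : call;
}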
5321
52dd234b
RH
5322/* Duplicate a type, fields and all. */
5323
5324tree
5325build_duplicate_type (tree type)
5326{
1b369fae 5327 struct copy_body_data id;
52dd234b
RH
5328
5329 memset (&id, 0, sizeof (id));
1b369fae
RH
5330 id.src_fn = current_function_decl;
5331 id.dst_fn = current_function_decl;
5332 id.src_cfun = cfun;
6be42dd4 5333 id.decl_map = pointer_map_create ();
b5b8b0ac 5334 id.debug_map = NULL;
4009f2e7 5335 id.copy_decl = copy_decl_no_change;
52dd234b
RH
5336
5337 type = remap_type_1 (type, &id);
5338
6be42dd4 5339 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
5340 if (id.debug_map)
5341 pointer_map_destroy (id.debug_map);
52dd234b 5342
f31c9f09
DG
5343 TYPE_CANONICAL (type) = type;
5344
52dd234b
RH
5345 return type;
5346}
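/* A usage sketch (hypothetical): the duplicate becomes its own canonical
   type, so it compares as a type distinct from the original.  */

static tree
sketch_private_type_copy (tree type)
{
  tree dup = build_duplicate_type (type);
  gcc_assert (TYPE_CANONICAL (dup) == dup);
  return dup;
}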