/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

#include "rtl.h"	/* FIXME: For asm_str_count.  */

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

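/* For illustration only (a hypothetical example, not taken from any
   real testcase).  Given

     int inc (int x) { return x + 1; }

   inlining the call "y = inc (3);" duplicates the body of inc, remaps
   the PARM_DECL x into a local VAR_DECL initialized from the argument,
   and turns the RETURN_EXPR into an assignment to a returned-value
   variable, roughly:

     { int x.1 = 3; int retval.2; retval.2 = x.1 + 1; y = retval.2; }

   The names x.1 and retval.2 are invented for this sketch.  */
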
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

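/* For example (an illustrative sketch, not actual GCC code): after

     insert_decl_map (id, old_parm, new_var);

   looking up either old_parm or new_var in id->decl_map yields new_var,
   so walking an already-remapped tree a second time is harmless.  */
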
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && SSA_NAME_IS_DEFAULT_DEF (name)
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = (tree *) pointer_map_contains (id->decl_map, val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by a variable
     during inlining: this saves us from the need to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
	{
	  /* By inlining a function having an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE in the case we end up extending the
	     lifetime of an SSA name across an abnormal edge, and also
	     increases register pressure.

	     We simply initialize all uninitialized vars by 0, except
	     for the case we are inlining to the very first BB.  We can
	     avoid this for all BBs that are not inside strongly
	     connected regions of the CFG, but this is expensive to
	     test.  */
	  if (id->entry_bb
	      && is_gimple_reg (SSA_NAME_VAR (name))
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
		  == name)
		set_default_def (SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

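/* Illustrative sketch of the zero-initialization above (hypothetical
   SSA names, not from a real dump): if the source body uses i_2, where
   i is an uninitialized local, the copy gets a fresh name, say i_7,
   and, unless we are inlining into the very first BB, an explicit
   "i_7 = 0" is appended to id->entry_bb so the name has a proper
   definition in the copy.  */
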
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      if ((TREE_CODE (t) == VAR_DECL
	   || TREE_CODE (t) == RESULT_DECL
	   || TREE_CODE (t) == PARM_DECL)
	  && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
	  && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
	  /* We don't want to mark as referenced VAR_DECLs that were
	     not marked as such in the src function.  */
	  && (TREE_CODE (decl) != VAR_DECL
	      || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
					DECL_UID (decl))))
	add_referenced_var (t);
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

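/* For example (an illustrative sketch): the first remap_decl (v, id)
   for a local v copies it via id->copy_decl and records the pair in
   id->decl_map; every later call for the same v just returns the
   recorded copy (unshared, unless id->do_not_unshare is set).  */
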
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f, nf = NULL;

	for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	  {
	    t = remap_decl (f, id);
	    DECL_CONTEXT (t) = new_tree;
	    DECL_CHAIN (t) = nf;
	    nf = t;
	  }
	TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

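/* Illustrative example (hypothetical, for exposition only): a local
   VLA in the source function, say "int buf[n];" where n is a
   PARM_DECL, has a variably modified type: its TYPE_SIZE refers to n.
   remap_type therefore builds a fresh type whose size expression uses
   the copy of n, while an ordinary type like "int[10]" is simply
   mapped to itself.  */
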
/* Return previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

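/* For instance (an illustrative case): a "static int counter;"
   declared inside the source function is not auto_var_in_fn_p, so it
   is left shared rather than duplicated, and may end up recorded in
   BLOCK_NONLOCALIZED_VARS of the copied block by remap_decls below.  */
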
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var)
	      && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}

/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
	gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;

	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type,
			     ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  But make sure
	     to not improperly set TREE_BLOCK on some sub-expressions.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  tree block = id->block;
	  id->block = NULL_TREE;
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  id->block = block;
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

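/* Illustrative sketch of the MEM_REF re-canonicalization above
   (hypothetical trees, invented for this note): if a pointer parameter
   p was bound to &v at the call site, a source operand MEM[p] first
   remaps its pointer to the copy of &v, and fold_build2 then
   re-canonicalizes the reference to the object itself, while the
   volatility, no-trap and no-warning flags of the original reference
   are carried over.  */
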
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree new_tree;
	      tree old;
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (TREE_TYPE (*n));
	      if (id->do_not_unshare)
		new_tree = *n;
	      else
		new_tree = unshare_expr (*n);
	      old = *tp;
	      *tp = gimple_fold_indirect_ref (new_tree);
	      if (! *tp)
		{
		  if (TREE_CODE (new_tree) == ADDR_EXPR)
		    {
		      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
						 type, new_tree);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (new_tree, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, new_tree);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      tree old = *tp;
	      *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
				 unshare_expr (*n), TREE_OPERAND (*tp, 1));
	      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
	 vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
	  && TREE_CODE (*tp) == VAR_DECL
	  && id->remapping_type_depth == 0
	  && !processing_debug_stmt)
	add_referenced_var (*tp);

      /* If EXPR has block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = (tree *) pointer_map_contains (id->decl_map,
						 TREE_BLOCK (*tp));
	      gcc_assert (n || id->remapping_type_depth != 0);
	      if (n)
		new_block = *n;
	    }
	  TREE_BLOCK (*tp) = new_block;
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

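/* Illustrative sketch of the *& elimination above (a hypothetical
   example): inlining "int deref (int *p) { return *p; }" at the call
   "deref (&x)" maps p to &x, so the body's *p becomes *&x, which is
   folded back to plain x instead of leaving an INDIRECT_REF of an
   ADDR_EXPR in the IL.  */
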
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

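/* For example (an illustrative sketch with made-up region numbers): if
   the inlined body contains "__builtin_eh_pointer (2)" and the EH
   region that was number 2 in the source function became number 5 in
   the destination, remap_gimple_stmt below rewrites the copied call to
   "__builtin_eh_pointer (5)" via the helpers above.  */
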
/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->retvar, retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;
	}
      else
	return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (stmt, id);
	  break;

	case GIMPLE_CATCH:
	  s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
	  copy = gimple_build_catch (gimple_catch_types (stmt), s1);
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_parallel
		   (s1,
		    gimple_omp_parallel_clauses (stmt),
		    gimple_omp_parallel_child_fn (stmt),
		    gimple_omp_parallel_data_arg (stmt));
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy
	    = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
	  copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
	  gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = (tree *) pointer_map_contains (id->decl_map, decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return gimple_build_nop ();
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
					  gimple_debug_bind_get_value (stmt),
					  stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  copy = gimple_build_debug_source_bind
		   (gimple_debug_source_bind_get_var (stmt),
		    gimple_debug_source_bind_get_value (stmt), stmt);
	  VEC_safe_push (gimple, heap, id->debug_stmts, copy);
	  return copy;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (copy);
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      int r = gimple_resx_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (copy, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      int r = gimple_eh_dispatch_region (copy);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (copy, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

e21aff8a
SB
1518/* Copy basic block, scale profile accordingly. Edges will be taken care of
1519 later */
1520
1521static basic_block
0178d644
VR
1522copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1523 gcov_type count_scale)
e21aff8a 1524{
c2a4718a 1525 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
e21aff8a 1526 basic_block copy_basic_block;
726a989a 1527 tree decl;
0d63a740 1528 gcov_type freq;
91382288
JH
1529 basic_block prev;
1530
1531 /* Search for previous copied basic block. */
1532 prev = bb->prev_bb;
1533 while (!prev->aux)
1534 prev = prev->prev_bb;
e21aff8a
SB
1535
1536 /* create_basic_block() will append every new block to
1537 basic_block_info automatically. */
cceb1885 1538 copy_basic_block = create_basic_block (NULL, (void *) 0,
91382288 1539 (basic_block) prev->aux);
e21aff8a 1540 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
45a80bb9 1541
726a989a
RB
1542 /* We are going to rebuild frequencies from scratch. These values
1543 have just small importance to drive canonicalize_loop_headers. */
0d63a740 1544 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
726a989a 1545
0d63a740
JH
1546 /* We recompute frequencies after inlining, so this is quite safe. */
1547 if (freq > BB_FREQ_MAX)
1548 freq = BB_FREQ_MAX;
1549 copy_basic_block->frequency = freq;
e21aff8a 1550
726a989a
RB
1551 copy_gsi = gsi_start_bb (copy_basic_block);
1552
1553 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e21aff8a 1554 {
726a989a
RB
1555 gimple stmt = gsi_stmt (gsi);
1556 gimple orig_stmt = stmt;
e21aff8a 1557
416c991f 1558 id->regimplify = false;
726a989a
RB
1559 stmt = remap_gimple_stmt (stmt, id);
1560 if (gimple_nop_p (stmt))
1561 continue;
1562
1563 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
c2a4718a 1564 seq_gsi = copy_gsi;
726a989a
RB
1565
1566 /* With return slot optimization we can end up with
1567 non-gimple (foo *)&this->m, fix that here. */
4c29307d
JJ
1568 if (is_gimple_assign (stmt)
1569 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1570 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
e21aff8a 1571 {
726a989a 1572 tree new_rhs;
c2a4718a 1573 new_rhs = force_gimple_operand_gsi (&seq_gsi,
4a2b7f24 1574 gimple_assign_rhs1 (stmt),
cf1bcf06
EB
1575 true, NULL, false,
1576 GSI_CONTINUE_LINKING);
726a989a 1577 gimple_assign_set_rhs1 (stmt, new_rhs);
c2a4718a 1578 id->regimplify = false;
726a989a 1579 }
2b65dae5 1580
c2a4718a
JJ
1581 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1582
1583 if (id->regimplify)
1584 gimple_regimplify_operands (stmt, &seq_gsi);
1585
1586 /* If copy_basic_block has been empty at the start of this iteration,
1587 call gsi_start_bb again to get at the newly added statements. */
1588 if (gsi_end_p (copy_gsi))
1589 copy_gsi = gsi_start_bb (copy_basic_block);
1590 else
1591 gsi_next (&copy_gsi);
110cfe1c 1592
726a989a
RB
1593 /* Process the new statement. The call to gimple_regimplify_operands
 1594	 possibly turned the statement into multiple statements; we
 1595	 need to process all of them.  */
c2a4718a 1596 do
726a989a 1597 {
9187e02d
JH
1598 tree fn;
1599
c2a4718a 1600 stmt = gsi_stmt (copy_gsi);
726a989a
RB
1601 if (is_gimple_call (stmt)
1602 && gimple_call_va_arg_pack_p (stmt)
1603 && id->gimple_call)
1604 {
1605 /* __builtin_va_arg_pack () should be replaced by
1606 all arguments corresponding to ... in the caller. */
1607 tree p;
1608 gimple new_call;
1609 VEC(tree, heap) *argarray;
1610 size_t nargs = gimple_call_num_args (id->gimple_call);
1611 size_t n;
1612
910ad8de 1613 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
726a989a
RB
1614 nargs--;
1615
1616 /* Create the new array of arguments. */
1617 n = nargs + gimple_call_num_args (stmt);
1618 argarray = VEC_alloc (tree, heap, n);
1619 VEC_safe_grow (tree, heap, argarray, n);
1620
1621 /* Copy all the arguments before '...' */
1622 memcpy (VEC_address (tree, argarray),
1623 gimple_call_arg_ptr (stmt, 0),
1624 gimple_call_num_args (stmt) * sizeof (tree));
1625
1626 /* Append the arguments passed in '...' */
1627 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1628 gimple_call_arg_ptr (id->gimple_call, 0)
1629 + (gimple_call_num_args (id->gimple_call) - nargs),
1630 nargs * sizeof (tree));
1631
1632 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1633 argarray);
1634
1635 VEC_free (tree, heap, argarray);
1636
1637 /* Copy all GIMPLE_CALL flags, location and block, except
1638 GF_CALL_VA_ARG_PACK. */
1639 gimple_call_copy_flags (new_call, stmt);
1640 gimple_call_set_va_arg_pack (new_call, false);
1641 gimple_set_location (new_call, gimple_location (stmt));
1642 gimple_set_block (new_call, gimple_block (stmt));
1643 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1644
1645 gsi_replace (&copy_gsi, new_call, false);
1646 stmt = new_call;
1647 }
1648 else if (is_gimple_call (stmt)
1649 && id->gimple_call
1650 && (decl = gimple_call_fndecl (stmt))
1651 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1652 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
e0704a46 1653 {
726a989a
RB
1654 /* __builtin_va_arg_pack_len () should be replaced by
1655 the number of anonymous arguments. */
1656 size_t nargs = gimple_call_num_args (id->gimple_call);
1657 tree count, p;
1658 gimple new_stmt;
1659
910ad8de 1660 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
726a989a
RB
1661 nargs--;
1662
1663 count = build_int_cst (integer_type_node, nargs);
1664 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1665 gsi_replace (&copy_gsi, new_stmt, false);
1666 stmt = new_stmt;
1667 }
b8a00a4d 1668
726a989a
RB
 1669	  /* Statements produced by inlining can be unfolded, especially
 1670	     when we constant propagated some operands.  We can't fold
 1671	     them right now for two reasons:
 1672	     1) folding requires SSA_NAME_DEF_STMTs to be correct
 1673	     2) we can't change function calls to builtins.
 1674	     So we just mark the statement for later folding.  We mark
 1675	     all new statements, instead of just the statements that have
 1676	     changed by some nontrivial substitution, so that even statements
 1677	     made foldable indirectly are updated.  If this turns out to be
 1678	     expensive, copy_body can be told to watch for nontrivial
 1679	     changes.  */
1680 if (id->statements_to_fold)
1681 pointer_set_insert (id->statements_to_fold, stmt);
1682
1683 /* We're duplicating a CALL_EXPR. Find any corresponding
1684 callgraph edges and update or duplicate them. */
1685 if (is_gimple_call (stmt))
1686 {
9b2a5ef7 1687 struct cgraph_edge *edge;
f618d33e 1688 int flags;
6ef5231b 1689
726a989a 1690 switch (id->transform_call_graph_edges)
e0704a46 1691 {
9b2a5ef7
RH
1692 case CB_CGE_DUPLICATE:
1693 edge = cgraph_edge (id->src_node, orig_stmt);
1694 if (edge)
0d63a740
JH
1695 {
1696 int edge_freq = edge->frequency;
1697 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1698 gimple_uid (stmt),
1699 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
898b8927 1700 true);
0d63a740
JH
1701 /* We could also just rescale the frequency, but
1702 doing so would introduce roundoff errors and make
 1703		     the verifier unhappy.  */
b8698a0f 1704 edge->frequency
0d63a740
JH
1705 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1706 copy_basic_block);
1707 if (dump_file
1708 && profile_status_for_function (cfun) != PROFILE_ABSENT
1709 && (edge_freq > edge->frequency + 10
1710 || edge_freq < edge->frequency - 10))
1711 {
1712 fprintf (dump_file, "Edge frequency estimated by "
1713 "cgraph %i diverge from inliner's estimate %i\n",
1714 edge_freq,
1715 edge->frequency);
1716 fprintf (dump_file,
1717 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1718 bb->index,
1719 bb->frequency,
1720 copy_basic_block->frequency);
1721 }
8132a837 1722 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
0d63a740 1723 }
9b2a5ef7
RH
1724 break;
1725
1726 case CB_CGE_MOVE_CLONES:
1727 cgraph_set_call_stmt_including_clones (id->dst_node,
1728 orig_stmt, stmt);
1729 edge = cgraph_edge (id->dst_node, stmt);
1730 break;
1731
1732 case CB_CGE_MOVE:
1733 edge = cgraph_edge (id->dst_node, orig_stmt);
1734 if (edge)
1735 cgraph_set_call_stmt (edge, stmt);
1736 break;
1737
1738 default:
1739 gcc_unreachable ();
110cfe1c 1740 }
f618d33e 1741
9b2a5ef7
RH
 1742	  /* Constant propagation on arguments done during inlining
 1743	     may create a new direct call.  Produce an edge for it.  */
b8698a0f 1744 if ((!edge
e33c6cd6 1745 || (edge->indirect_inlining_edge
9b2a5ef7 1746 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
f9fd305b 1747 && id->dst_node->analyzed
9b2a5ef7
RH
1748 && (fn = gimple_call_fndecl (stmt)) != NULL)
1749 {
581985d7 1750 struct cgraph_node *dest = cgraph_get_node (fn);
9b2a5ef7
RH
1751
 1752	      /* We have a missing edge in the callgraph.  This can happen
1753 when previous inlining turned an indirect call into a
0e3776db 1754		 direct call by constant propagating arguments, or we are
20a6bb58 1755		 producing a dead clone (for further cloning).  In all
9b2a5ef7
RH
1756 other cases we hit a bug (incorrect node sharing is the
1757 most common reason for missing edges). */
0e3776db 1758 gcc_assert (dest->needed || !dest->analyzed
bd3cdcc0 1759 || dest->address_taken
0cac82a0
JH
1760 || !id->src_node->analyzed
1761 || !id->dst_node->analyzed);
9b2a5ef7
RH
1762 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1763 cgraph_create_edge_including_clones
47cb0d7d 1764 (id->dst_node, dest, orig_stmt, stmt, bb->count,
b8698a0f 1765 compute_call_stmt_bb_frequency (id->dst_node->decl,
0d63a740 1766 copy_basic_block),
898b8927 1767 CIF_ORIGINALLY_INDIRECT_CALL);
9b2a5ef7
RH
1768 else
1769 cgraph_create_edge (id->dst_node, dest, stmt,
47cb0d7d
JH
1770 bb->count,
1771 compute_call_stmt_bb_frequency
898b8927 1772 (id->dst_node->decl, copy_basic_block))->inline_failed
9b2a5ef7
RH
1773 = CIF_ORIGINALLY_INDIRECT_CALL;
1774 if (dump_file)
1775 {
91382288 1776 fprintf (dump_file, "Created new direct edge to %s\n",
9b2a5ef7
RH
1777 cgraph_node_name (dest));
1778 }
1779 }
9187e02d 1780
f618d33e 1781 flags = gimple_call_flags (stmt);
f618d33e
MJ
1782 if (flags & ECF_MAY_BE_ALLOCA)
1783 cfun->calls_alloca = true;
1784 if (flags & ECF_RETURNS_TWICE)
1785 cfun->calls_setjmp = true;
726a989a 1786 }
e21aff8a 1787
1d65f45c
RH
1788 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1789 id->eh_map, id->eh_lp_nr);
726a989a 1790
b5b8b0ac 1791 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
726a989a
RB
1792 {
1793 ssa_op_iter i;
1794 tree def;
1795
1796 find_new_referenced_vars (gsi_stmt (copy_gsi));
1797 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1798 if (TREE_CODE (def) == SSA_NAME)
1799 SSA_NAME_DEF_STMT (def) = stmt;
1800 }
1801
1802 gsi_next (&copy_gsi);
e21aff8a 1803 }
c2a4718a 1804 while (!gsi_end_p (copy_gsi));
726a989a
RB
1805
1806 copy_gsi = gsi_last_bb (copy_basic_block);
e21aff8a 1807 }
726a989a 1808
e21aff8a
SB
1809 return copy_basic_block;
1810}
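
/* Editor's illustrative aside (not part of tree-inline.c): the
   __builtin_va_arg_pack handling above is easiest to see from the
   source level.  A minimal sketch of the wrapper pattern the two
   builtins were designed for; my_fprintf is an invented name.  */

#include <stdio.h>

extern int my_fprintf (FILE *, const char *, ...);

extern inline __attribute__ ((__gnu_inline__, __always_inline__)) int
my_fprintf (FILE *f, const char *fmt, ...)
{
  /* Once this body is inlined, copy_bb folds
     __builtin_va_arg_pack_len () to the caller's number of
     anonymous arguments...  */
  if (__builtin_va_arg_pack_len () == 0)
    return fprintf (f, "%s", fmt);
  /* ...and expands __builtin_va_arg_pack () into all of them.  */
  return fprintf (f, fmt, __builtin_va_arg_pack ());
}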
1811
110cfe1c
JH
 1812/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
 1813   form is quite easy, since the dominator relationship for the old basic
 1814   blocks does not change.
 1815
 1816   There is, however, an exception: inlining might change the dominator
 1817   relation across EH edges from basic blocks within the inlined function
5305a4cb 1818   to landing pads in the function we inline into.
110cfe1c 1819
e9705dc5
AO
1820 The function fills in PHI_RESULTs of such PHI nodes if they refer
 1821   to gimple regs.  Otherwise, it marks the PHI_RESULT of such
1822 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1823 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1824 set, and this means that there will be no overlapping live ranges
110cfe1c
JH
1825 for the underlying symbol.
1826
 1827   This might change in the future if we allow redirecting of EH edges;
 1828   we might then want to change the way we build the CFG pre-inlining
 1829   to include all the possible edges.  */
1830static void
e9705dc5
AO
1831update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1832 bool can_throw, bool nonlocal_goto)
110cfe1c
JH
1833{
1834 edge e;
1835 edge_iterator ei;
1836
1837 FOR_EACH_EDGE (e, ei, bb->succs)
1838 if (!e->dest->aux
1839 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1840 {
726a989a
RB
1841 gimple phi;
1842 gimple_stmt_iterator si;
110cfe1c 1843
e9705dc5
AO
1844 if (!nonlocal_goto)
1845 gcc_assert (e->flags & EDGE_EH);
726a989a 1846
e9705dc5
AO
1847 if (!can_throw)
1848 gcc_assert (!(e->flags & EDGE_EH));
726a989a
RB
1849
1850 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
110cfe1c 1851 {
e9705dc5
AO
1852 edge re;
1853
726a989a
RB
1854 phi = gsi_stmt (si);
1855
e9705dc5
AO
1856 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1857 gcc_assert (!e->dest->aux);
1858
496a4ef5
JH
1859 gcc_assert ((e->flags & EDGE_EH)
1860 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
e9705dc5
AO
1861
1862 if (!is_gimple_reg (PHI_RESULT (phi)))
1863 {
726a989a 1864 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
e9705dc5
AO
1865 continue;
1866 }
1867
1868 re = find_edge (ret_bb, e->dest);
1432b19f 1869 gcc_assert (re);
e9705dc5
AO
1870 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1871 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1872
1873 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1874 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
110cfe1c
JH
1875 }
1876 }
1877}
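
/* Editor's illustrative aside (not part of tree-inline.c): one
   source-level origin of the abnormal, non-EH edges whose PHIs are
   repaired above is a nonlocal goto out of a GNU C nested function;
   walk, check and bail are invented names.  */

int
walk (const int *a, int n)
{
  __label__ bail;                      /* visible to nested functions */
  int sum = 0;
  void check (int v) { if (v < 0) goto bail; }   /* nonlocal goto */
  for (int i = 0; i < n; i++)
    {
      check (a[i]);                    /* may jump abnormally to bail */
      sum += a[i];
    }
  return sum;
 bail:
  return -1;
}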
1878
726a989a 1879
128a79fb
KH
1880/* Copy edges from BB into its copy constructed earlier, scale profile
 1881   accordingly.  Assume the aux
90a7788b
JJ
 1882   pointers point to the copies of each BB.  Return true if any
1883 debug stmts are left after a statement that must end the basic block. */
726a989a 1884
90a7788b 1885static bool
0178d644 1886copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
e21aff8a 1887{
cceb1885 1888 basic_block new_bb = (basic_block) bb->aux;
e21aff8a
SB
1889 edge_iterator ei;
1890 edge old_edge;
726a989a 1891 gimple_stmt_iterator si;
e21aff8a 1892 int flags;
90a7788b 1893 bool need_debug_cleanup = false;
e21aff8a
SB
1894
1895 /* Use the indices from the original blocks to create edges for the
1896 new ones. */
1897 FOR_EACH_EDGE (old_edge, ei, bb->succs)
e0704a46
JH
1898 if (!(old_edge->flags & EDGE_EH))
1899 {
82d6e6fc 1900 edge new_edge;
e21aff8a 1901
e0704a46 1902 flags = old_edge->flags;
e21aff8a 1903
e0704a46
JH
 1904	 /* Return edges do get a FALLTHRU flag when they get inlined.  */
1905 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1906 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1907 flags |= EDGE_FALLTHRU;
82d6e6fc
KG
1908 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1909 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1910 new_edge->probability = old_edge->probability;
e0704a46 1911 }
e21aff8a
SB
1912
1913 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
90a7788b 1914 return false;
e21aff8a 1915
726a989a 1916 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
e21aff8a 1917 {
726a989a 1918 gimple copy_stmt;
e9705dc5 1919 bool can_throw, nonlocal_goto;
e21aff8a 1920
726a989a 1921 copy_stmt = gsi_stmt (si);
b5b8b0ac
AO
1922 if (!is_gimple_debug (copy_stmt))
1923 {
1924 update_stmt (copy_stmt);
1925 if (gimple_in_ssa_p (cfun))
1926 mark_symbols_for_renaming (copy_stmt);
1927 }
726a989a 1928
e21aff8a 1929 /* Do this before the possible split_block. */
726a989a 1930 gsi_next (&si);
e21aff8a
SB
1931
1932 /* If this tree could throw an exception, there are two
1933 cases where we need to add abnormal edge(s): the
1934 tree wasn't in a region and there is a "current
1935 region" in the caller; or the original tree had
1936 EH edges. In both cases split the block after the tree,
1937 and add abnormal edge(s) as needed; we need both
1938 those from the callee and the caller.
1939 We check whether the copy can throw, because the const
1940 propagation can change an INDIRECT_REF which throws
1941 into a COMPONENT_REF which doesn't. If the copy
1942 can throw, the original could also throw. */
726a989a
RB
1943 can_throw = stmt_can_throw_internal (copy_stmt);
1944 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
e9705dc5
AO
1945
1946 if (can_throw || nonlocal_goto)
e21aff8a 1947 {
90a7788b
JJ
1948 if (!gsi_end_p (si))
1949 {
1950 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1951 gsi_next (&si);
1952 if (gsi_end_p (si))
1953 need_debug_cleanup = true;
1954 }
726a989a 1955 if (!gsi_end_p (si))
e21aff8a
SB
1956 /* Note that bb's predecessor edges aren't necessarily
1957 right at this point; split_block doesn't care. */
1958 {
1959 edge e = split_block (new_bb, copy_stmt);
110cfe1c 1960
e21aff8a 1961 new_bb = e->dest;
110cfe1c 1962 new_bb->aux = e->src->aux;
726a989a 1963 si = gsi_start_bb (new_bb);
e21aff8a 1964 }
e9705dc5 1965 }
e21aff8a 1966
1d65f45c
RH
1967 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1968 make_eh_dispatch_edges (copy_stmt);
1969 else if (can_throw)
e9705dc5 1970 make_eh_edges (copy_stmt);
110cfe1c 1971
e9705dc5 1972 if (nonlocal_goto)
726a989a 1973 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
e9705dc5
AO
1974
1975 if ((can_throw || nonlocal_goto)
1976 && gimple_in_ssa_p (cfun))
726a989a 1977 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
e9705dc5 1978 can_throw, nonlocal_goto);
110cfe1c 1979 }
90a7788b 1980 return need_debug_cleanup;
110cfe1c
JH
1981}
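
/* Editor's illustrative aside (not part of tree-inline.c): why the
   copy rather than the original is tested with stmt_can_throw_internal
   above.  Under -fnon-call-exceptions the load through P may throw,
   but after inlining get with the known argument &s it becomes a
   COMPONENT_REF on a local, which cannot; get and use are invented
   names.  */

struct S { int x; };

static int
get (struct S *p)
{
  return p->x;        /* *p: may trap, hence may throw internally */
}

int
use (void)
{
  struct S s = { 42 };
  return get (&s);    /* inlined copy reads s.x: provably non-trapping */
}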
1982
1983/* Copy the PHIs. All blocks and edges are copied, some blocks
 1984   were possibly split and new outgoing EH edges inserted.
 1985   BB points to the block of the original function and AUX pointers
 1986   link the original and newly copied blocks.  */
1987
1988static void
1989copy_phis_for_bb (basic_block bb, copy_body_data *id)
1990{
3d9a9f94 1991 basic_block const new_bb = (basic_block) bb->aux;
110cfe1c 1992 edge_iterator ei;
726a989a
RB
1993 gimple phi;
1994 gimple_stmt_iterator si;
6a78fd06
RG
1995 edge new_edge;
1996 bool inserted = false;
110cfe1c 1997
726a989a 1998 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
110cfe1c 1999 {
726a989a
RB
2000 tree res, new_res;
2001 gimple new_phi;
110cfe1c 2002
726a989a
RB
2003 phi = gsi_stmt (si);
2004 res = PHI_RESULT (phi);
2005 new_res = res;
110cfe1c
JH
2006 if (is_gimple_reg (res))
2007 {
726a989a 2008 walk_tree (&new_res, copy_tree_body_r, id, NULL);
110cfe1c
JH
2009 SSA_NAME_DEF_STMT (new_res)
2010 = new_phi = create_phi_node (new_res, new_bb);
2011 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2012 {
8b3057b3
JH
2013 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2014 tree arg;
2015 tree new_arg;
726a989a 2016 tree block = id->block;
8b3057b3
JH
2017 edge_iterator ei2;
2018
20a6bb58 2019 /* When doing partial cloning, we allow PHIs on the entry block
8b3057b3
JH
2020 as long as all the arguments are the same. Find any input
 2021		 edge to see the argument to copy.  */
2022 if (!old_edge)
2023 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2024 if (!old_edge->src->aux)
2025 break;
2026
2027 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2028 new_arg = arg;
726a989a
RB
2029 id->block = NULL_TREE;
2030 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2031 id->block = block;
110cfe1c 2032 gcc_assert (new_arg);
36b6e793
JJ
2033 /* With return slot optimization we can end up with
 2034	     non-gimple (foo *)&this->m; fix that here.  */
2035 if (TREE_CODE (new_arg) != SSA_NAME
2036 && TREE_CODE (new_arg) != FUNCTION_DECL
2037 && !is_gimple_val (new_arg))
2038 {
726a989a
RB
2039 gimple_seq stmts = NULL;
2040 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
6a78fd06
RG
2041 gsi_insert_seq_on_edge (new_edge, stmts);
2042 inserted = true;
36b6e793 2043 }
b8698a0f 2044 add_phi_arg (new_phi, new_arg, new_edge,
f5045c96 2045 gimple_phi_arg_location_from_edge (phi, old_edge));
110cfe1c 2046 }
e21aff8a
SB
2047 }
2048 }
6a78fd06
RG
2049
2050 /* Commit the delayed edge insertions. */
2051 if (inserted)
2052 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2053 gsi_commit_one_edge_insert (new_edge, NULL);
e21aff8a
SB
2054}
2055
726a989a 2056
e21aff8a 2057/* Wrapper for remap_decl so it can be used as a callback. */
726a989a 2058
e21aff8a
SB
2059static tree
2060remap_decl_1 (tree decl, void *data)
2061{
1b369fae 2062 return remap_decl (decl, (copy_body_data *) data);
e21aff8a
SB
2063}
2064
110cfe1c
JH
2065/* Build struct function and associated datastructures for the new clone
 2066   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
2067
2068static void
0d63a740 2069initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
110cfe1c 2070{
110cfe1c 2071 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
0d63a740 2072 gcov_type count_scale;
110cfe1c
JH
2073
2074 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2075 count_scale = (REG_BR_PROB_BASE * count
2076 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2077 else
0d63a740 2078 count_scale = REG_BR_PROB_BASE;
110cfe1c
JH
2079
2080 /* Register specific tree functions. */
726a989a 2081 gimple_register_cfg_hooks ();
39ecc018
JH
2082
2083 /* Get clean struct function. */
2084 push_struct_function (new_fndecl);
2085
2086 /* We will rebuild these, so just sanity check that they are empty. */
2087 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2088 gcc_assert (cfun->local_decls == NULL);
2089 gcc_assert (cfun->cfg == NULL);
2090 gcc_assert (cfun->decl == new_fndecl);
2091
20a6bb58 2092 /* Copy items we preserve during cloning. */
39ecc018
JH
2093 cfun->static_chain_decl = src_cfun->static_chain_decl;
2094 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2095 cfun->function_end_locus = src_cfun->function_end_locus;
2096 cfun->curr_properties = src_cfun->curr_properties;
2097 cfun->last_verified = src_cfun->last_verified;
39ecc018
JH
2098 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2099 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
39ecc018
JH
2100 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2101 cfun->stdarg = src_cfun->stdarg;
39ecc018 2102 cfun->after_inlining = src_cfun->after_inlining;
8f4f502f
EB
2103 cfun->can_throw_non_call_exceptions
2104 = src_cfun->can_throw_non_call_exceptions;
39ecc018
JH
2105 cfun->returns_struct = src_cfun->returns_struct;
2106 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2107 cfun->after_tree_profile = src_cfun->after_tree_profile;
2108
110cfe1c
JH
2109 init_empty_tree_cfg ();
2110
0d63a740 2111 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
110cfe1c
JH
2112 ENTRY_BLOCK_PTR->count =
2113 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2114 REG_BR_PROB_BASE);
0d63a740
JH
2115 ENTRY_BLOCK_PTR->frequency
2116 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
110cfe1c
JH
2117 EXIT_BLOCK_PTR->count =
2118 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2119 REG_BR_PROB_BASE);
2120 EXIT_BLOCK_PTR->frequency =
0d63a740 2121 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
110cfe1c
JH
2122 if (src_cfun->eh)
2123 init_eh_for_function ();
2124
2125 if (src_cfun->gimple_df)
2126 {
5db9ba0c 2127 init_tree_ssa (cfun);
110cfe1c
JH
2128 cfun->gimple_df->in_ssa_p = true;
2129 init_ssa_operands ();
2130 }
2131 pop_cfun ();
2132}
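
/* Editor's illustrative aside (not part of tree-inline.c): the
   fixed-point profile scaling used above, restated as a standalone
   sketch.  The concrete value of REG_BR_PROB_BASE is an assumption
   made for the example.  */

typedef long long gcov_type;
#define REG_BR_PROB_BASE 10000

/* Map a basic block count from the callee's profile onto the call
   site's count, mirroring the count_scale computation above.  */
static gcov_type
scale_count (gcov_type bb_count, gcov_type call_site_count,
             gcov_type callee_entry_count)
{
  gcov_type count_scale = callee_entry_count
    ? REG_BR_PROB_BASE * call_site_count / callee_entry_count
    : REG_BR_PROB_BASE;
  return bb_count * count_scale / REG_BR_PROB_BASE;
}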
2133
90a7788b
JJ
2134/* Helper function for copy_cfg_body. Move debug stmts from the end
2135 of NEW_BB to the beginning of successor basic blocks when needed. If the
 2136   successor has multiple predecessors, reset the debug stmts' values;
 2137   otherwise keep them.  */
2138
2139static void
2140maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2141{
2142 edge e;
2143 edge_iterator ei;
2144 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2145
2146 if (gsi_end_p (si)
2147 || gsi_one_before_end_p (si)
2148 || !(stmt_can_throw_internal (gsi_stmt (si))
2149 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2150 return;
2151
2152 FOR_EACH_EDGE (e, ei, new_bb->succs)
2153 {
2154 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2155 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2156 while (is_gimple_debug (gsi_stmt (ssi)))
2157 {
2158 gimple stmt = gsi_stmt (ssi), new_stmt;
2159 tree var;
2160 tree value;
2161
2162 /* For the last edge move the debug stmts instead of copying
2163 them. */
2164 if (ei_one_before_end_p (ei))
2165 {
2166 si = ssi;
2167 gsi_prev (&ssi);
ddb555ed 2168 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
90a7788b
JJ
2169 gimple_debug_bind_reset_value (stmt);
2170 gsi_remove (&si, false);
2171 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2172 continue;
2173 }
2174
ddb555ed 2175 if (gimple_debug_bind_p (stmt))
90a7788b 2176 {
ddb555ed
JJ
2177 var = gimple_debug_bind_get_var (stmt);
2178 if (single_pred_p (e->dest))
2179 {
2180 value = gimple_debug_bind_get_value (stmt);
2181 value = unshare_expr (value);
2182 }
2183 else
2184 value = NULL_TREE;
2185 new_stmt = gimple_build_debug_bind (var, value, stmt);
2186 }
2187 else if (gimple_debug_source_bind_p (stmt))
2188 {
2189 var = gimple_debug_source_bind_get_var (stmt);
2190 value = gimple_debug_source_bind_get_value (stmt);
2191 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
90a7788b
JJ
2192 }
2193 else
ddb555ed 2194 gcc_unreachable ();
90a7788b
JJ
2195 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2196 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2197 gsi_prev (&ssi);
2198 }
2199 }
2200}
2201
e21aff8a
SB
2202/* Make a copy of the body of FN so that it can be inserted inline in
2203 another function. Walks FN via CFG, returns new fndecl. */
2204
2205static tree
0d63a740 2206copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
91382288
JH
2207 basic_block entry_block_map, basic_block exit_block_map,
2208 bitmap blocks_to_copy, basic_block new_entry)
e21aff8a 2209{
1b369fae 2210 tree callee_fndecl = id->src_fn;
e21aff8a 2211 /* Original cfun for the callee, doesn't change. */
1b369fae 2212 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
110cfe1c 2213 struct function *cfun_to_copy;
e21aff8a
SB
2214 basic_block bb;
2215 tree new_fndecl = NULL;
90a7788b 2216 bool need_debug_cleanup = false;
0d63a740 2217 gcov_type count_scale;
110cfe1c 2218 int last;
20a6bb58
JH
2219 int incoming_frequency = 0;
2220 gcov_type incoming_count = 0;
e21aff8a 2221
1b369fae 2222 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
e21aff8a 2223 count_scale = (REG_BR_PROB_BASE * count
1b369fae 2224 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
e21aff8a 2225 else
0d63a740 2226 count_scale = REG_BR_PROB_BASE;
e21aff8a
SB
2227
2228 /* Register specific tree functions. */
726a989a 2229 gimple_register_cfg_hooks ();
e21aff8a 2230
b35366ce
JH
 2231  /* If we are inlining just a region of the function, make sure to connect
 2232     the new entry to ENTRY_BLOCK_PTR.  Since the new entry can be part of a
 2233     loop, we must compute the frequency and probability of ENTRY_BLOCK_PTR
20a6bb58 2234     based on the frequencies and probabilities of edges incoming from the nonduplicated region.  */
b35366ce
JH
2235 if (new_entry)
2236 {
2237 edge e;
2238 edge_iterator ei;
2239
2240 FOR_EACH_EDGE (e, ei, new_entry->preds)
2241 if (!e->src->aux)
2242 {
20a6bb58
JH
2243 incoming_frequency += EDGE_FREQUENCY (e);
2244 incoming_count += e->count;
b35366ce 2245 }
20a6bb58
JH
2246 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2247 incoming_frequency
2248 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2249 ENTRY_BLOCK_PTR->count = incoming_count;
2250 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
b35366ce
JH
2251 }
2252
e21aff8a
SB
2253 /* Must have a CFG here at this point. */
2254 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2255 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2256
110cfe1c
JH
2257 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2258
e21aff8a
SB
2259 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2260 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
110cfe1c
JH
2261 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2262 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
e21aff8a 2263
e21aff8a
SB
2264 /* Duplicate any exception-handling regions. */
2265 if (cfun->eh)
1d65f45c
RH
2266 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2267 remap_decl_1, id);
726a989a 2268
e21aff8a
SB
2269 /* Use aux pointers to map the original blocks to copy. */
2270 FOR_EACH_BB_FN (bb, cfun_to_copy)
91382288
JH
2271 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2272 {
2273 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2274 bb->aux = new_bb;
2275 new_bb->aux = bb;
2276 }
110cfe1c 2277
7c57be85 2278 last = last_basic_block;
726a989a 2279
e21aff8a
SB
2280 /* Now that we've duplicated the blocks, duplicate their edges. */
2281 FOR_ALL_BB_FN (bb, cfun_to_copy)
91382288
JH
2282 if (!blocks_to_copy
2283 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2284 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
726a989a 2285
91382288 2286 if (new_entry)
110cfe1c 2287 {
b35366ce 2288 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
91382288 2289 e->probability = REG_BR_PROB_BASE;
20a6bb58 2290 e->count = incoming_count;
110cfe1c 2291 }
726a989a 2292
8b3057b3
JH
2293 if (gimple_in_ssa_p (cfun))
2294 FOR_ALL_BB_FN (bb, cfun_to_copy)
2295 if (!blocks_to_copy
2296 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2297 copy_phis_for_bb (bb, id);
2298
91382288
JH
2299 FOR_ALL_BB_FN (bb, cfun_to_copy)
2300 if (bb->aux)
2301 {
2302 if (need_debug_cleanup
2303 && bb->index != ENTRY_BLOCK
2304 && bb->index != EXIT_BLOCK)
2305 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2306 ((basic_block)bb->aux)->aux = NULL;
2307 bb->aux = NULL;
2308 }
2309
110cfe1c
JH
2310 /* Zero out AUX fields of newly created block during EH edge
2311 insertion. */
7c57be85 2312 for (; last < last_basic_block; last++)
90a7788b
JJ
2313 {
2314 if (need_debug_cleanup)
2315 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2316 BASIC_BLOCK (last)->aux = NULL;
2317 }
110cfe1c
JH
2318 entry_block_map->aux = NULL;
2319 exit_block_map->aux = NULL;
e21aff8a 2320
1d65f45c
RH
2321 if (id->eh_map)
2322 {
2323 pointer_map_destroy (id->eh_map);
2324 id->eh_map = NULL;
2325 }
2326
e21aff8a
SB
2327 return new_fndecl;
2328}
2329
b5b8b0ac
AO
2330/* Copy the debug STMT using ID. We deal with these statements in a
2331 special way: if any variable in their VALUE expression wasn't
2332 remapped yet, we won't remap it, because that would get decl uids
2333 out of sync, causing codegen differences between -g and -g0. If
2334 this arises, we drop the VALUE expression altogether. */
2335
2336static void
2337copy_debug_stmt (gimple stmt, copy_body_data *id)
2338{
2339 tree t, *n;
2340 struct walk_stmt_info wi;
2341
2342 t = id->block;
2343 if (gimple_block (stmt))
2344 {
b5b8b0ac
AO
2345 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2346 if (n)
2347 t = *n;
2348 }
2349 gimple_set_block (stmt, t);
2350
2351 /* Remap all the operands in COPY. */
2352 memset (&wi, 0, sizeof (wi));
2353 wi.info = id;
2354
2355 processing_debug_stmt = 1;
2356
ddb555ed
JJ
2357 if (gimple_debug_source_bind_p (stmt))
2358 t = gimple_debug_source_bind_get_var (stmt);
2359 else
2360 t = gimple_debug_bind_get_var (stmt);
b5b8b0ac
AO
2361
2362 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2363 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2364 {
2365 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2366 t = *n;
2367 }
d17af147
JJ
2368 else if (TREE_CODE (t) == VAR_DECL
2369 && !TREE_STATIC (t)
2370 && gimple_in_ssa_p (cfun)
2371 && !pointer_map_contains (id->decl_map, t)
2372 && !var_ann (t))
2373 /* T is a non-localized variable. */;
b5b8b0ac
AO
2374 else
2375 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2376
ddb555ed
JJ
2377 if (gimple_debug_bind_p (stmt))
2378 {
2379 gimple_debug_bind_set_var (stmt, t);
b5b8b0ac 2380
ddb555ed
JJ
2381 if (gimple_debug_bind_has_value_p (stmt))
2382 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2383 remap_gimple_op_r, &wi, NULL);
b5b8b0ac 2384
ddb555ed
JJ
2385 /* Punt if any decl couldn't be remapped. */
2386 if (processing_debug_stmt < 0)
2387 gimple_debug_bind_reset_value (stmt);
2388 }
2389 else if (gimple_debug_source_bind_p (stmt))
2390 {
2391 gimple_debug_source_bind_set_var (stmt, t);
2392 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2393 remap_gimple_op_r, &wi, NULL);
2394 }
b5b8b0ac
AO
2395
2396 processing_debug_stmt = 0;
2397
2398 update_stmt (stmt);
2399 if (gimple_in_ssa_p (cfun))
2400 mark_symbols_for_renaming (stmt);
2401}
2402
2403/* Process deferred debug stmts. In order to give values better odds
2404 of being successfully remapped, we delay the processing of debug
2405 stmts until all other stmts that might require remapping are
2406 processed. */
2407
2408static void
2409copy_debug_stmts (copy_body_data *id)
2410{
2411 size_t i;
2412 gimple stmt;
2413
2414 if (!id->debug_stmts)
2415 return;
2416
ac47786e 2417 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
b5b8b0ac
AO
2418 copy_debug_stmt (stmt, id);
2419
2420 VEC_free (gimple, heap, id->debug_stmts);
2421}
2422
f82a627c
EB
2423/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2424 another function. */
2425
2426static tree
2427copy_tree_body (copy_body_data *id)
2428{
2429 tree fndecl = id->src_fn;
2430 tree body = DECL_SAVED_TREE (fndecl);
2431
2432 walk_tree (&body, copy_tree_body_r, id, NULL);
2433
2434 return body;
2435}
2436
b5b8b0ac
AO
2437/* Make a copy of the body of FN so that it can be inserted inline in
2438 another function. */
2439
e21aff8a 2440static tree
0d63a740 2441copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
91382288
JH
2442 basic_block entry_block_map, basic_block exit_block_map,
2443 bitmap blocks_to_copy, basic_block new_entry)
e21aff8a 2444{
1b369fae 2445 tree fndecl = id->src_fn;
e21aff8a
SB
2446 tree body;
2447
2448 /* If this body has a CFG, walk CFG and copy. */
2449 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
91382288
JH
2450 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2451 blocks_to_copy, new_entry);
b5b8b0ac 2452 copy_debug_stmts (id);
e21aff8a
SB
2453
2454 return body;
2455}
2456
04482133
AO
2457/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2458 defined in function FN, or of a data member thereof. */
2459
2460static bool
2461self_inlining_addr_expr (tree value, tree fn)
2462{
2463 tree var;
2464
2465 if (TREE_CODE (value) != ADDR_EXPR)
2466 return false;
2467
2468 var = get_base_address (TREE_OPERAND (value, 0));
e21aff8a 2469
50886bf1 2470 return var && auto_var_in_fn_p (var, fn);
04482133
AO
2471}
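
/* Editor's illustrative aside (not part of tree-inline.c): the
   situation self_inlining_addr_expr guards against.  When f is
   inlined into itself, &local coming from the callee copy is the
   address of an automatic variable of f, so it must not be treated
   as a propagatable constant in the enclosing copy; f and local are
   invented names.  */

static int
f (int depth, int *outer)
{
  int local = depth;
  if (depth == 0)
    return *outer;
  return f (depth - 1, &local) + local;   /* recursive, self-inlinable */
}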
2472
b5b8b0ac
AO
2473/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2474 lexical block and line number information from base_stmt, if given,
2475 or from the last stmt of the block otherwise. */
2476
2477static gimple
2478insert_init_debug_bind (copy_body_data *id,
2479 basic_block bb, tree var, tree value,
2480 gimple base_stmt)
2481{
2482 gimple note;
2483 gimple_stmt_iterator gsi;
2484 tree tracked_var;
2485
2486 if (!gimple_in_ssa_p (id->src_cfun))
2487 return NULL;
2488
2489 if (!MAY_HAVE_DEBUG_STMTS)
2490 return NULL;
2491
2492 tracked_var = target_for_debug_bind (var);
2493 if (!tracked_var)
2494 return NULL;
2495
2496 if (bb)
2497 {
2498 gsi = gsi_last_bb (bb);
2499 if (!base_stmt && !gsi_end_p (gsi))
2500 base_stmt = gsi_stmt (gsi);
2501 }
2502
2503 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2504
2505 if (bb)
2506 {
2507 if (!gsi_end_p (gsi))
2508 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2509 else
2510 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2511 }
2512
2513 return note;
2514}
2515
6de9cd9a 2516static void
b5b8b0ac 2517insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
0f1961a2 2518{
0f1961a2
JH
 2519  /* If VAR represents a zero-sized variable, the assignment statement
 2520     may result in no gimple statements.  */
2521 if (init_stmt)
c2a4718a
JJ
2522 {
2523 gimple_stmt_iterator si = gsi_last_bb (bb);
0f1961a2 2524
bfb0b886
RG
2525 /* We can end up with init statements that store to a non-register
2526 from a rhs with a conversion. Handle that here by forcing the
2527 rhs into a temporary. gimple_regimplify_operands is not
2528 prepared to do this for us. */
b5b8b0ac
AO
2529 if (!is_gimple_debug (init_stmt)
2530 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
bfb0b886
RG
2531 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2532 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2533 {
2534 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2535 gimple_expr_type (init_stmt),
2536 gimple_assign_rhs1 (init_stmt));
2537 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2538 GSI_NEW_STMT);
2539 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2540 gimple_assign_set_rhs1 (init_stmt, rhs);
2541 }
c2a4718a
JJ
2542 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2543 gimple_regimplify_operands (init_stmt, &si);
2544 mark_symbols_for_renaming (init_stmt);
b5b8b0ac
AO
2545
2546 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2547 {
2548 tree var, def = gimple_assign_lhs (init_stmt);
2549
2550 if (TREE_CODE (def) == SSA_NAME)
2551 var = SSA_NAME_VAR (def);
2552 else
2553 var = def;
2554
2555 insert_init_debug_bind (id, bb, var, def, init_stmt);
2556 }
c2a4718a 2557 }
0f1961a2
JH
2558}
2559
 2560/* Initialize parameter P with VALUE.  If needed, produce an init statement
 2561   at the end of BB.  When BB is NULL, we return the init statement to be
2562 output later. */
2563static gimple
1b369fae 2564setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
e21aff8a 2565 basic_block bb, tree *vars)
6de9cd9a 2566{
0f1961a2 2567 gimple init_stmt = NULL;
6de9cd9a 2568 tree var;
f4088621 2569 tree rhs = value;
110cfe1c
JH
2570 tree def = (gimple_in_ssa_p (cfun)
2571 ? gimple_default_def (id->src_cfun, p) : NULL);
6de9cd9a 2572
f4088621
RG
2573 if (value
2574 && value != error_mark_node
2575 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
c54e3854
RG
2576 {
2577 if (fold_convertible_p (TREE_TYPE (p), value))
2578 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2579 else
2580 /* ??? For valid (GIMPLE) programs we should not end up here.
2581 Still if something has gone wrong and we end up with truly
2582 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2583 to not leak invalid GIMPLE to the following passes. */
2584 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2585 }
f4088621 2586
b5b8b0ac
AO
2587 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2588 here since the type of this decl must be visible to the calling
2589 function. */
2590 var = copy_decl_to_var (p, id);
2591
2592 /* We're actually using the newly-created var. */
2593 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
bb7e6d55 2594 add_referenced_var (var);
b5b8b0ac
AO
2595
2596 /* Declare this new variable. */
910ad8de 2597 DECL_CHAIN (var) = *vars;
b5b8b0ac
AO
2598 *vars = var;
2599
2600 /* Make gimplifier happy about this variable. */
2601 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2602
110cfe1c 2603   /* If the parameter is never assigned to and has no SSA_NAMEs created,
b5b8b0ac
AO
2604 we would not need to create a new variable here at all, if it
2605 weren't for debug info. Still, we can just use the argument
2606 value. */
6de9cd9a
DN
2607 if (TREE_READONLY (p)
2608 && !TREE_ADDRESSABLE (p)
110cfe1c
JH
2609 && value && !TREE_SIDE_EFFECTS (value)
2610 && !def)
6de9cd9a 2611 {
84936f6f
RH
2612 /* We may produce non-gimple trees by adding NOPs or introduce
 2613	 invalid sharing when the operand is not really constant.
 2614	 It is not a big deal to prohibit constant propagation here, as
 2615	 we will constant propagate in the DOM1 pass anyway.  */
2616 if (is_gimple_min_invariant (value)
f4088621
RG
2617 && useless_type_conversion_p (TREE_TYPE (p),
2618 TREE_TYPE (value))
04482133
AO
2619 /* We have to be very careful about ADDR_EXPR. Make sure
2620 the base variable isn't a local variable of the inlined
2621 function, e.g., when doing recursive inlining, direct or
2622 mutually-recursive or whatever, which is why we don't
2623 just test whether fn == current_function_decl. */
2624 && ! self_inlining_addr_expr (value, fn))
6de9cd9a 2625 {
6de9cd9a 2626 insert_decl_map (id, p, value);
b5b8b0ac
AO
2627 insert_debug_decl_map (id, p, var);
2628 return insert_init_debug_bind (id, bb, var, value, NULL);
6de9cd9a
DN
2629 }
2630 }
2631
6de9cd9a
DN
2632 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2633 that way, when the PARM_DECL is encountered, it will be
2634 automatically replaced by the VAR_DECL. */
7c7d3047 2635 insert_decl_map (id, p, var);
6de9cd9a 2636
6de9cd9a
DN
2637 /* Even if P was TREE_READONLY, the new VAR should not be.
2638 In the original code, we would have constructed a
2639 temporary, and then the function body would have never
2640 changed the value of P. However, now, we will be
2641 constructing VAR directly. The constructor body may
2642 change its value multiple times as it is being
2643 constructed. Therefore, it must not be TREE_READONLY;
 2644     the back-end assumes that a TREE_READONLY variable is
2645 assigned to only once. */
2646 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2647 TREE_READONLY (var) = 0;
2648
110cfe1c
JH
 2649  /* If there is no setup required and we are in SSA, take the easy route,
 2650     replacing all SSA names representing the function parameter by the
 2651     SSA name passed to the function.
 2652
 2653     We need to construct a map for the variable anyway, as it might be
 2654     used in different SSA names when the parameter is set in the function.
2655
8454d27e
JH
 2656     Do the replacement at -O0 for const arguments replaced by a constant.
 2657     This is important for builtin_constant_p and other constructs that
b5b8b0ac 2658     require a constant argument to be visible in the inlined function body.  */
110cfe1c 2659 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
8454d27e
JH
2660 && (optimize
2661 || (TREE_READONLY (p)
2662 && is_gimple_min_invariant (rhs)))
110cfe1c 2663 && (TREE_CODE (rhs) == SSA_NAME
9b718f81
JH
2664 || is_gimple_min_invariant (rhs))
2665 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
110cfe1c
JH
2666 {
2667 insert_decl_map (id, def, rhs);
b5b8b0ac 2668 return insert_init_debug_bind (id, bb, var, rhs, NULL);
110cfe1c
JH
2669 }
2670
f6f2da7d
JH
 2671  /* If the value of the argument is never used, don't bother initializing
2672 it. */
1cf5abb3 2673 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
f6f2da7d
JH
2674 {
2675 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
b5b8b0ac 2676 return insert_init_debug_bind (id, bb, var, rhs, NULL);
f6f2da7d
JH
2677 }
2678
6de9cd9a
DN
2679 /* Initialize this VAR_DECL from the equivalent argument. Convert
2680 the argument to the proper type in case it was promoted. */
2681 if (value)
2682 {
6de9cd9a 2683 if (rhs == error_mark_node)
110cfe1c 2684 {
7c7d3047 2685 insert_decl_map (id, p, var);
b5b8b0ac 2686 return insert_init_debug_bind (id, bb, var, rhs, NULL);
110cfe1c 2687 }
afe08db5 2688
73dab33b 2689 STRIP_USELESS_TYPE_CONVERSION (rhs);
6de9cd9a 2690
726a989a 2691 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
6de9cd9a 2692 keep our trees in gimple form. */
110cfe1c
JH
2693 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2694 {
2695 def = remap_ssa_name (def, id);
726a989a 2696 init_stmt = gimple_build_assign (def, rhs);
110cfe1c
JH
2697 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2698 set_default_def (var, NULL);
2699 }
2700 else
726a989a 2701 init_stmt = gimple_build_assign (var, rhs);
6de9cd9a 2702
0f1961a2 2703 if (bb && init_stmt)
b5b8b0ac 2704 insert_init_stmt (id, bb, init_stmt);
6de9cd9a 2705 }
0f1961a2 2706 return init_stmt;
6de9cd9a
DN
2707}
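
/* Editor's illustrative aside (not part of tree-inline.c): why const
   arguments are replaced even at -O0.  In the fortify-style wrapper
   below, __builtin_constant_p (n) can only fold after the inliner
   substitutes the caller's constant for N inside the inlined body;
   bounded_memset and its check are invented for the example.  */

#include <string.h>

static inline __attribute__ ((always_inline)) void *
bounded_memset (void *dst, int c, size_t n, size_t bufsize)
{
  /* Folds only once a constant N is visible in the inlined body.  */
  if (__builtin_constant_p (n) && n > bufsize)
    __builtin_trap ();
  return memset (dst, c, n);
}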
2708
d4e4baa9 2709/* Generate code to initialize the parameters of the function at the
726a989a 2710 top of the stack in ID from the GIMPLE_CALL STMT. */
d4e4baa9 2711
e21aff8a 2712static void
726a989a 2713initialize_inlined_parameters (copy_body_data *id, gimple stmt,
e21aff8a 2714 tree fn, basic_block bb)
d4e4baa9 2715{
d4e4baa9 2716 tree parms;
726a989a 2717 size_t i;
d4e4baa9 2718 tree p;
d436bff8 2719 tree vars = NULL_TREE;
726a989a 2720 tree static_chain = gimple_call_chain (stmt);
d4e4baa9
AO
2721
2722 /* Figure out what the parameters are. */
18c6ada9 2723 parms = DECL_ARGUMENTS (fn);
d4e4baa9 2724
d4e4baa9
AO
2725 /* Loop through the parameter declarations, replacing each with an
2726 equivalent VAR_DECL, appropriately initialized. */
910ad8de 2727 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
726a989a
RB
2728 {
2729 tree val;
2730 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2731 setup_one_parameter (id, p, val, fn, bb, &vars);
2732 }
ea184343
RG
2733 /* After remapping parameters remap their types. This has to be done
2734 in a second loop over all parameters to appropriately remap
2735 variable sized arrays when the size is specified in a
2736 parameter following the array. */
910ad8de 2737 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
ea184343
RG
2738 {
2739 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2740 if (varp
2741 && TREE_CODE (*varp) == VAR_DECL)
2742 {
72aa3dca 2743 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
ea184343 2744 ? gimple_default_def (id->src_cfun, p) : NULL);
72aa3dca
RG
2745 tree var = *varp;
2746 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
ea184343
RG
2747 /* Also remap the default definition if it was remapped
2748 to the default definition of the parameter replacement
2749 by the parameter setup. */
72aa3dca 2750 if (def)
ea184343
RG
2751 {
2752 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2753 if (defp
2754 && TREE_CODE (*defp) == SSA_NAME
72aa3dca
RG
2755 && SSA_NAME_VAR (*defp) == var)
2756 TREE_TYPE (*defp) = TREE_TYPE (var);
ea184343
RG
2757 }
2758 }
2759 }
4838c5ee 2760
6de9cd9a
DN
2761 /* Initialize the static chain. */
2762 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
ea99e0be 2763 gcc_assert (fn != current_function_decl);
6de9cd9a
DN
2764 if (p)
2765 {
2766 /* No static chain? Seems like a bug in tree-nested.c. */
1e128c5f 2767 gcc_assert (static_chain);
4838c5ee 2768
e21aff8a 2769 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
4838c5ee
AO
2770 }
2771
e21aff8a 2772 declare_inline_vars (id->block, vars);
d4e4baa9
AO
2773}
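
/* Editor's illustrative aside (not part of tree-inline.c): where the
   static chain initialized above comes from.  The GNU C nested
   function add refers to total in its containing frame, so calls to
   it carry a static chain argument, which is set up like any other
   parameter when add is inlined; sum and add are invented names.  */

int
sum (const int *a, int n)
{
  int total = 0;
  void add (int v) { total += v; }   /* needs sum's frame: static chain */
  for (int i = 0; i < n; i++)
    add (a[i]);
  return total;
}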
2774
726a989a 2775
e21aff8a
SB
2776/* Declare a return variable to replace the RESULT_DECL for the
 2777   function we are calling.  An appropriate VAR_DECL is created and
 2778   registered as the replacement; a use of that declaration is built
 2779   to indicate the return value of the function.
2780
110cfe1c
JH
 2781   RETURN_SLOT, if non-null, is the place where to store the result.  It
2782 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
726a989a 2783 was the LHS of the MODIFY_EXPR to which this call is the RHS.
7740f00d 2784
0f900dfa
JJ
2785 The return value is a (possibly null) value that holds the result
2786 as seen by the caller. */
d4e4baa9 2787
d436bff8 2788static tree
6938f93f
JH
2789declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2790 basic_block entry_bb)
d4e4baa9 2791{
1b369fae 2792 tree callee = id->src_fn;
7740f00d
RH
2793 tree result = DECL_RESULT (callee);
2794 tree callee_type = TREE_TYPE (result);
ea2edf88 2795 tree caller_type;
7740f00d 2796 tree var, use;
d4e4baa9 2797
ea2edf88
RG
2798 /* Handle type-mismatches in the function declaration return type
2799 vs. the call expression. */
2800 if (modify_dest)
2801 caller_type = TREE_TYPE (modify_dest);
2802 else
2803 caller_type = TREE_TYPE (TREE_TYPE (callee));
2804
d4e4baa9
AO
2805 /* We don't need to do anything for functions that don't return
2806 anything. */
7740f00d 2807 if (!result || VOID_TYPE_P (callee_type))
0f900dfa 2808 return NULL_TREE;
d4e4baa9 2809
cc77ae10 2810 /* If there was a return slot, then the return value is the
7740f00d 2811 dereferenced address of that object. */
110cfe1c 2812 if (return_slot)
7740f00d 2813 {
110cfe1c 2814 /* The front end shouldn't have used both return_slot and
7740f00d 2815 a modify expression. */
1e128c5f 2816 gcc_assert (!modify_dest);
cc77ae10 2817 if (DECL_BY_REFERENCE (result))
110cfe1c
JH
2818 {
2819 tree return_slot_addr = build_fold_addr_expr (return_slot);
2820 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2821
2822 /* We are going to construct *&return_slot and we can't do that
b8698a0f 2823	     for variables believed not to be addressable.
110cfe1c
JH
2824
 2825	     FIXME: This check can possibly match, because values returned
 2826	     via the return slot optimization are not believed to have their
 2827	     address taken by alias analysis.  */
2828 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
110cfe1c
JH
2829 var = return_slot_addr;
2830 }
cc77ae10 2831 else
110cfe1c
JH
2832 {
2833 var = return_slot;
2834 gcc_assert (TREE_CODE (var) != SSA_NAME);
b5ca517c 2835 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
110cfe1c 2836 }
0890b981
AP
2837 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2838 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2839 && !DECL_GIMPLE_REG_P (result)
22918034 2840 && DECL_P (var))
0890b981 2841 DECL_GIMPLE_REG_P (var) = 0;
7740f00d
RH
2842 use = NULL;
2843 goto done;
2844 }
2845
2846 /* All types requiring non-trivial constructors should have been handled. */
1e128c5f 2847 gcc_assert (!TREE_ADDRESSABLE (callee_type));
7740f00d
RH
2848
2849 /* Attempt to avoid creating a new temporary variable. */
110cfe1c
JH
2850 if (modify_dest
2851 && TREE_CODE (modify_dest) != SSA_NAME)
7740f00d
RH
2852 {
2853 bool use_it = false;
2854
2855 /* We can't use MODIFY_DEST if there's type promotion involved. */
f4088621 2856 if (!useless_type_conversion_p (callee_type, caller_type))
7740f00d
RH
2857 use_it = false;
2858
2859 /* ??? If we're assigning to a variable sized type, then we must
2860 reuse the destination variable, because we've no good way to
2861 create variable sized temporaries at this point. */
2862 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2863 use_it = true;
2864
2865 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2866 reuse it as the result of the call directly. Don't do this if
2867 it would promote MODIFY_DEST to addressable. */
e2f9fe42
RH
2868 else if (TREE_ADDRESSABLE (result))
2869 use_it = false;
2870 else
2871 {
2872 tree base_m = get_base_address (modify_dest);
2873
2874 /* If the base isn't a decl, then it's a pointer, and we don't
2875 know where that's going to go. */
2876 if (!DECL_P (base_m))
2877 use_it = false;
2878 else if (is_global_var (base_m))
2879 use_it = false;
0890b981
AP
2880 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2881 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2882 && !DECL_GIMPLE_REG_P (result)
2883 && DECL_GIMPLE_REG_P (base_m))
1d327c16 2884 use_it = false;
e2f9fe42
RH
2885 else if (!TREE_ADDRESSABLE (base_m))
2886 use_it = true;
2887 }
7740f00d
RH
2888
2889 if (use_it)
2890 {
2891 var = modify_dest;
2892 use = NULL;
2893 goto done;
2894 }
2895 }
2896
1e128c5f 2897 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
7740f00d 2898
c08cd4c1 2899 var = copy_result_decl_to_var (result, id);
110cfe1c 2900 if (gimple_in_ssa_p (cfun))
bb7e6d55 2901 add_referenced_var (var);
e21aff8a 2902
7740f00d 2903 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
7740f00d 2904
6de9cd9a 2905 /* Do not have the rest of GCC warn about this variable as it should
471854f8 2906 not be visible to the user. */
6de9cd9a 2907 TREE_NO_WARNING (var) = 1;
d4e4baa9 2908
c08cd4c1
JM
2909 declare_inline_vars (id->block, var);
2910
7740f00d
RH
2911 /* Build the use expr. If the return type of the function was
2912 promoted, convert it back to the expected type. */
2913 use = var;
f4088621 2914 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
7740f00d 2915 use = fold_convert (caller_type, var);
b8698a0f 2916
73dab33b 2917 STRIP_USELESS_TYPE_CONVERSION (use);
7740f00d 2918
c08cd4c1 2919 if (DECL_BY_REFERENCE (result))
32848948
RG
2920 {
2921 TREE_ADDRESSABLE (var) = 1;
2922 var = build_fold_addr_expr (var);
2923 }
c08cd4c1 2924
7740f00d 2925 done:
d4e4baa9
AO
2926 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2927 way, when the RESULT_DECL is encountered, it will be
6938f93f
JH
2928 automatically replaced by the VAR_DECL.
2929
2930 When returning by reference, ensure that RESULT_DECL remaps to
2931 gimple_val. */
2932 if (DECL_BY_REFERENCE (result)
2933 && !is_gimple_val (var))
2934 {
2935 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
72109b25 2936 if (gimple_in_ssa_p (id->src_cfun))
bb7e6d55 2937 add_referenced_var (temp);
6938f93f 2938 insert_decl_map (id, result, temp);
7e97ee1a
JH
 2939      /* When RESULT_DECL is in SSA form, we need to use its default_def
2940 SSA_NAME. */
2941 if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
2942 temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
6938f93f
JH
2943 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2944 }
2945 else
2946 insert_decl_map (id, result, var);
d4e4baa9 2947
6de9cd9a
DN
2948 /* Remember this so we can ignore it in remap_decls. */
2949 id->retvar = var;
2950
0f900dfa 2951 return use;
d4e4baa9
AO
2952}
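
/* Editor's illustrative aside (not part of tree-inline.c): the return
   slot case handled above, at the source level.  With the return slot
   optimization the inlined body of make_big writes straight into *out,
   so the callee's RESULT_DECL remaps to the dereferenced slot address
   instead of a fresh temporary; make_big and use are invented names.  */

struct big { int v[64]; };

static struct big
make_big (int seed)
{
  struct big b;
  for (int i = 0; i < 64; i++)
    b.v[i] = seed + i;
  return b;
}

void
use (struct big *out, int seed)
{
  *out = make_big (seed);   /* candidate for CALL_EXPR_RETURN_SLOT_OPT */
}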
2953
27dbd3ac
RH
2954/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2955 to a local label. */
4838c5ee 2956
27dbd3ac
RH
2957static tree
2958has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
4838c5ee 2959{
27dbd3ac
RH
2960 tree node = *nodep;
2961 tree fn = (tree) fnp;
726a989a 2962
27dbd3ac
RH
2963 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2964 return node;
2965
2966 if (TYPE_P (node))
2967 *walk_subtrees = 0;
2968
2969 return NULL_TREE;
2970}
726a989a 2971
27dbd3ac
RH
2972/* Determine if the function can be copied. If so return NULL. If
 2973   not return a string describing the reason for failure.  */
2974
2975static const char *
2976copy_forbidden (struct function *fun, tree fndecl)
2977{
2978 const char *reason = fun->cannot_be_copied_reason;
c021f10b
NF
2979 tree decl;
2980 unsigned ix;
27dbd3ac
RH
2981
2982 /* Only examine the function once. */
2983 if (fun->cannot_be_copied_set)
2984 return reason;
2985
2986 /* We cannot copy a function that receives a non-local goto
2987 because we cannot remap the destination label used in the
2988 function that is performing the non-local goto. */
2989 /* ??? Actually, this should be possible, if we work at it.
2990 No doubt there's just a handful of places that simply
2991 assume it doesn't happen and don't substitute properly. */
2992 if (fun->has_nonlocal_label)
2993 {
2994 reason = G_("function %q+F can never be copied "
2995 "because it receives a non-local goto");
2996 goto fail;
2997 }
2998
c021f10b
NF
2999 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3000 if (TREE_CODE (decl) == VAR_DECL
3001 && TREE_STATIC (decl)
3002 && !DECL_EXTERNAL (decl)
3003 && DECL_INITIAL (decl)
3004 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3005 has_label_address_in_static_1,
3006 fndecl))
3007 {
3008 reason = G_("function %q+F can never be copied because it saves "
3009 "address of local label in a static variable");
3010 goto fail;
3011 }
27dbd3ac
RH
3012
3013 fail:
3014 fun->cannot_be_copied_reason = reason;
3015 fun->cannot_be_copied_set = true;
3016 return reason;
3017}
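
/* Editor's illustrative aside (not part of tree-inline.c): a function
   copy_forbidden rejects.  The static dispatch table stores addresses
   of local labels in its DECL_INITIAL, and a copied body would still
   jump to the labels of the original; step is an invented name.  */

int
step (int i)
{
  static void *tbl[] = { &&even, &&odd };  /* label addresses in a static */
  goto *tbl[i & 1];
 even:
  return 0;
 odd:
  return 1;
}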
3018
3019
3020static const char *inline_forbidden_reason;
3021
3022/* A callback for walk_gimple_seq to handle statements. Returns non-null
 3023   iff a function cannot be inlined.  Also sets the reason why.  */
c986baf6 3024
c986baf6 3025static tree
726a989a
RB
3026inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3027 struct walk_stmt_info *wip)
c986baf6 3028{
726a989a 3029 tree fn = (tree) wip->info;
f08545a8 3030 tree t;
726a989a 3031 gimple stmt = gsi_stmt (*gsi);
c986baf6 3032
726a989a 3033 switch (gimple_code (stmt))
f08545a8 3034 {
726a989a 3035 case GIMPLE_CALL:
3197c4fd
AS
 3036      /* Refuse to inline an alloca call unless the user explicitly forced it,
 3037	 as this may change the program's memory overhead drastically when the
 3038	 function using alloca is called in a loop.  In the GCC present in
 3039	 SPEC2000, inlining into schedule_block caused it to require 2GB of
63d2a353
MM
 3040	 RAM instead of 256MB.  Don't do so for alloca calls emitted for
 3041	 VLA objects, as those can't cause unbounded growth (they're always
 3042	 wrapped inside stack_save/stack_restore regions).  */
726a989a 3043 if (gimple_alloca_call_p (stmt)
63d2a353 3044 && !gimple_call_alloca_for_var_p (stmt)
f08545a8
JH
3045 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3046 {
ddd2d57e 3047 inline_forbidden_reason
dee15844 3048 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 3049 "alloca (override using the always_inline attribute)");
726a989a
RB
3050 *handled_ops_p = true;
3051 return fn;
f08545a8 3052 }
726a989a
RB
3053
3054 t = gimple_call_fndecl (stmt);
3055 if (t == NULL_TREE)
f08545a8 3056 break;
84f5e1b1 3057
f08545a8
JH
3058 /* We cannot inline functions that call setjmp. */
3059 if (setjmp_call_p (t))
3060 {
ddd2d57e 3061 inline_forbidden_reason
dee15844 3062 = G_("function %q+F can never be inlined because it uses setjmp");
726a989a
RB
3063 *handled_ops_p = true;
3064 return t;
f08545a8
JH
3065 }
3066
6de9cd9a 3067 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 3068 switch (DECL_FUNCTION_CODE (t))
f08545a8 3069 {
3197c4fd
AS
3070 /* We cannot inline functions that take a variable number of
3071 arguments. */
3072 case BUILT_IN_VA_START:
3197c4fd
AS
3073 case BUILT_IN_NEXT_ARG:
3074 case BUILT_IN_VA_END:
6de9cd9a 3075 inline_forbidden_reason
dee15844 3076 = G_("function %q+F can never be inlined because it "
6de9cd9a 3077 "uses variable argument lists");
726a989a
RB
3078 *handled_ops_p = true;
3079 return t;
6de9cd9a 3080
3197c4fd 3081 case BUILT_IN_LONGJMP:
6de9cd9a
DN
3082 /* We can't inline functions that call __builtin_longjmp at
3083 all. The non-local goto machinery really requires the
3084 destination be in a different function. If we allow the
3085 function calling __builtin_longjmp to be inlined into the
3086 function calling __builtin_setjmp, Things will Go Awry. */
3087 inline_forbidden_reason
dee15844 3088 = G_("function %q+F can never be inlined because "
6de9cd9a 3089 "it uses setjmp-longjmp exception handling");
726a989a
RB
3090 *handled_ops_p = true;
3091 return t;
6de9cd9a
DN
3092
3093 case BUILT_IN_NONLOCAL_GOTO:
3094 /* Similarly. */
3095 inline_forbidden_reason
dee15844 3096 = G_("function %q+F can never be inlined because "
6de9cd9a 3097 "it uses non-local goto");
726a989a
RB
3098 *handled_ops_p = true;
3099 return t;
f08545a8 3100
4b284111
JJ
3101 case BUILT_IN_RETURN:
3102 case BUILT_IN_APPLY_ARGS:
3103 /* If a __builtin_apply_args caller would be inlined,
3104 it would be saving arguments of the function it has
 3105	       been inlined into.  Similarly, __builtin_return would
 3106	       return from the function the call has been inlined into.  */
3107 inline_forbidden_reason
dee15844 3108 = G_("function %q+F can never be inlined because "
4b284111 3109 "it uses __builtin_return or __builtin_apply_args");
726a989a
RB
3110 *handled_ops_p = true;
3111 return t;
4b284111 3112
3197c4fd
AS
3113 default:
3114 break;
3115 }
f08545a8
JH
3116 break;
3117
726a989a
RB
3118 case GIMPLE_GOTO:
3119 t = gimple_goto_dest (stmt);
f08545a8
JH
3120
3121 /* We will not inline a function which uses computed goto. The
3122 addresses of its local labels, which may be tucked into
3123 global storage, are of course not constant across
3124 instantiations, which causes unexpected behavior. */
3125 if (TREE_CODE (t) != LABEL_DECL)
3126 {
ddd2d57e 3127 inline_forbidden_reason
dee15844 3128 = G_("function %q+F can never be inlined "
ddd2d57e 3129 "because it contains a computed goto");
726a989a
RB
3130 *handled_ops_p = true;
3131 return t;
f08545a8 3132 }
6de9cd9a 3133 break;
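      /* For illustration (a sketch, not code from this file), the
	 pattern rejected here is GCC's labels-as-values extension:

	   static void *tbl[] = { &&op_add, &&op_done };
	   goto *tbl[opcode];
	   op_add:  ...;
	   op_done: ...;

	 Were this body duplicated into a caller, the copies of op_add
	 and op_done would be fresh labels, while tbl would still hold
	 the addresses of the originals.  */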
f08545a8 3134
f08545a8
JH
3135 default:
3136 break;
3137 }
3138
726a989a 3139 *handled_ops_p = false;
f08545a8 3140 return NULL_TREE;
84f5e1b1
RH
3141}
3142
726a989a
RB
3143/* Return true if FNDECL is a function that cannot be inlined into
3144 another one. */
3145
3146static bool
f08545a8 3147inline_forbidden_p (tree fndecl)
84f5e1b1 3148{
2092ee7d 3149 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
726a989a
RB
3150 struct walk_stmt_info wi;
3151 struct pointer_set_t *visited_nodes;
3152 basic_block bb;
3153 bool forbidden_p = false;
3154
27dbd3ac
RH
3155 /* First check for shared reasons not to copy the code. */
3156 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3157 if (inline_forbidden_reason != NULL)
3158 return true;
3159
3160 /* Next, walk the statements of the function looking for
 3161 constructs we can't handle, or that are non-optimal for inlining. */
726a989a
RB
3162 visited_nodes = pointer_set_create ();
3163 memset (&wi, 0, sizeof (wi));
3164 wi.info = (void *) fndecl;
3165 wi.pset = visited_nodes;
e21aff8a 3166
2092ee7d 3167 FOR_EACH_BB_FN (bb, fun)
726a989a
RB
3168 {
3169 gimple ret;
3170 gimple_seq seq = bb_seq (bb);
27dbd3ac 3171 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
726a989a
RB
3172 forbidden_p = (ret != NULL);
3173 if (forbidden_p)
27dbd3ac 3174 break;
2092ee7d
JJ
3175 }
3176
726a989a 3177 pointer_set_destroy (visited_nodes);
726a989a 3178 return forbidden_p;
84f5e1b1
RH
3179}
3180
b3c3af2f
SB
3181/* Returns nonzero if FN is a function that does not have any
 3182 fundamental inline-blocking properties. */
d4e4baa9 3183
27dbd3ac
RH
3184bool
3185tree_inlinable_function_p (tree fn)
d4e4baa9 3186{
b3c3af2f 3187 bool inlinable = true;
18177c7e
RG
3188 bool do_warning;
3189 tree always_inline;
d4e4baa9
AO
3190
3191 /* If we've already decided this function shouldn't be inlined,
3192 there's no need to check again. */
3193 if (DECL_UNINLINABLE (fn))
b3c3af2f 3194 return false;
d4e4baa9 3195
18177c7e
RG
3196 /* We only warn for functions declared `inline' by the user. */
3197 do_warning = (warn_inline
18177c7e 3198 && DECL_DECLARED_INLINE_P (fn)
0494626a 3199 && !DECL_NO_INLINE_WARNING_P (fn)
18177c7e
RG
3200 && !DECL_IN_SYSTEM_HEADER (fn));
3201
3202 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3203
e90acd93 3204 if (flag_no_inline
18177c7e
RG
3205 && always_inline == NULL)
3206 {
3207 if (do_warning)
3208 warning (OPT_Winline, "function %q+F can never be inlined because it "
3209 "is suppressed using -fno-inline", fn);
3210 inlinable = false;
3211 }
3212
18177c7e
RG
3213 else if (!function_attribute_inlinable_p (fn))
3214 {
3215 if (do_warning)
3216 warning (OPT_Winline, "function %q+F can never be inlined because it "
3217 "uses attributes conflicting with inlining", fn);
3218 inlinable = false;
3219 }
46c5ad27 3220
f08545a8 3221 else if (inline_forbidden_p (fn))
b3c3af2f
SB
3222 {
3223 /* See if we should warn about uninlinable functions. Previously,
3224 some of these warnings would be issued while trying to expand
3225 the function inline, but that would cause multiple warnings
3226 about functions that would for example call alloca. But since
 3227 this is a property of the function, just one warning is enough.
3228 As a bonus we can now give more details about the reason why a
18177c7e
RG
3229 function is not inlinable. */
3230 if (always_inline)
c9fc06dc 3231 error (inline_forbidden_reason, fn);
2d327012 3232 else if (do_warning)
d2fcbf6f 3233 warning (OPT_Winline, inline_forbidden_reason, fn);
b3c3af2f
SB
3234
3235 inlinable = false;
3236 }
d4e4baa9
AO
3237
3238 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 3239 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 3240
b3c3af2f
SB
3241 return inlinable;
3242}
3243
e5c4f28a
RG
 3244/* Estimate the cost of a memory move. Use the machine-dependent
 3245 word size and take a possible memcpy call into account. */
3246
3247int
3248estimate_move_cost (tree type)
3249{
3250 HOST_WIDE_INT size;
3251
078c3644
JH
3252 gcc_assert (!VOID_TYPE_P (type));
3253
c204d113
L
3254 if (TREE_CODE (type) == VECTOR_TYPE)
3255 {
3256 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3257 enum machine_mode simd
3258 = targetm.vectorize.preferred_simd_mode (inner);
3259 int simd_mode_size = GET_MODE_SIZE (simd);
3260 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3261 / simd_mode_size);
3262 }
3263
e5c4f28a
RG
3264 size = int_size_in_bytes (type);
3265
e04ad03d 3266 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
e5c4f28a
RG
3267 /* Cost of a memcpy call, 3 arguments and the call. */
3268 return 4;
3269 else
3270 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3271}
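/* Worked example (with assumed target parameters): if MOVE_MAX_PIECES
   is 8 and MOVE_RATIO yields 4, the memcpy threshold is 8 * 4 = 32
   bytes. A 12-byte struct then costs (12 + 8 - 1) / 8 = 2 units, while
   a 64-byte struct exceeds the threshold and is charged the flat
   memcpy cost of 4. */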
3272
726a989a 3273/* Returns cost of operation CODE, according to WEIGHTS */
7f9bc51b 3274
726a989a 3275static int
02f0b13a
JH
3276estimate_operator_cost (enum tree_code code, eni_weights *weights,
3277 tree op1 ATTRIBUTE_UNUSED, tree op2)
6de9cd9a 3278{
726a989a 3279 switch (code)
6de9cd9a 3280 {
726a989a
RB
3281 /* These are "free" conversions, or their presumed cost
3282 is folded into other operations. */
61fcaeec 3283 case RANGE_EXPR:
1a87cf0c 3284 CASE_CONVERT:
726a989a
RB
3285 case COMPLEX_EXPR:
3286 case PAREN_EXPR:
d4d92cd3 3287 case VIEW_CONVERT_EXPR:
726a989a 3288 return 0;
6de9cd9a 3289
e5c4f28a
RG
3290 /* Assign cost of 1 to usual operations.
3291 ??? We may consider mapping RTL costs to this. */
6de9cd9a 3292 case COND_EXPR:
4151978d 3293 case VEC_COND_EXPR:
2205ed25 3294 case VEC_PERM_EXPR:
6de9cd9a
DN
3295
3296 case PLUS_EXPR:
5be014d5 3297 case POINTER_PLUS_EXPR:
6de9cd9a
DN
3298 case MINUS_EXPR:
3299 case MULT_EXPR:
16949072 3300 case FMA_EXPR:
6de9cd9a 3301
09e881c9 3302 case ADDR_SPACE_CONVERT_EXPR:
325217ed 3303 case FIXED_CONVERT_EXPR:
6de9cd9a 3304 case FIX_TRUNC_EXPR:
6de9cd9a
DN
3305
3306 case NEGATE_EXPR:
3307 case FLOAT_EXPR:
3308 case MIN_EXPR:
3309 case MAX_EXPR:
3310 case ABS_EXPR:
3311
3312 case LSHIFT_EXPR:
3313 case RSHIFT_EXPR:
3314 case LROTATE_EXPR:
3315 case RROTATE_EXPR:
a6b46ba2
DN
3316 case VEC_LSHIFT_EXPR:
3317 case VEC_RSHIFT_EXPR:
6de9cd9a
DN
3318
3319 case BIT_IOR_EXPR:
3320 case BIT_XOR_EXPR:
3321 case BIT_AND_EXPR:
3322 case BIT_NOT_EXPR:
3323
3324 case TRUTH_ANDIF_EXPR:
3325 case TRUTH_ORIF_EXPR:
3326 case TRUTH_AND_EXPR:
3327 case TRUTH_OR_EXPR:
3328 case TRUTH_XOR_EXPR:
3329 case TRUTH_NOT_EXPR:
3330
3331 case LT_EXPR:
3332 case LE_EXPR:
3333 case GT_EXPR:
3334 case GE_EXPR:
3335 case EQ_EXPR:
3336 case NE_EXPR:
3337 case ORDERED_EXPR:
3338 case UNORDERED_EXPR:
3339
3340 case UNLT_EXPR:
3341 case UNLE_EXPR:
3342 case UNGT_EXPR:
3343 case UNGE_EXPR:
3344 case UNEQ_EXPR:
d1a7edaf 3345 case LTGT_EXPR:
6de9cd9a 3346
6de9cd9a
DN
3347 case CONJ_EXPR:
3348
3349 case PREDECREMENT_EXPR:
3350 case PREINCREMENT_EXPR:
3351 case POSTDECREMENT_EXPR:
3352 case POSTINCREMENT_EXPR:
3353
16630a2c
DN
3354 case REALIGN_LOAD_EXPR:
3355
61d3cdbb
DN
3356 case REDUC_MAX_EXPR:
3357 case REDUC_MIN_EXPR:
3358 case REDUC_PLUS_EXPR:
20f06221 3359 case WIDEN_SUM_EXPR:
726a989a
RB
3360 case WIDEN_MULT_EXPR:
3361 case DOT_PROD_EXPR:
0354c0c7
BS
3362 case WIDEN_MULT_PLUS_EXPR:
3363 case WIDEN_MULT_MINUS_EXPR:
36ba4aae 3364 case WIDEN_LSHIFT_EXPR:
726a989a 3365
89d67cca
DN
3366 case VEC_WIDEN_MULT_HI_EXPR:
3367 case VEC_WIDEN_MULT_LO_EXPR:
3368 case VEC_UNPACK_HI_EXPR:
3369 case VEC_UNPACK_LO_EXPR:
d9987fb4
UB
3370 case VEC_UNPACK_FLOAT_HI_EXPR:
3371 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 3372 case VEC_PACK_TRUNC_EXPR:
89d67cca 3373 case VEC_PACK_SAT_EXPR:
d9987fb4 3374 case VEC_PACK_FIX_TRUNC_EXPR:
98b44b0e
IR
3375 case VEC_EXTRACT_EVEN_EXPR:
3376 case VEC_EXTRACT_ODD_EXPR:
3377 case VEC_INTERLEAVE_HIGH_EXPR:
3378 case VEC_INTERLEAVE_LOW_EXPR:
36ba4aae
IR
3379 case VEC_WIDEN_LSHIFT_HI_EXPR:
3380 case VEC_WIDEN_LSHIFT_LO_EXPR:
98b44b0e 3381
726a989a 3382 return 1;
6de9cd9a 3383
1ea7e6ad 3384 /* A few special cases of expensive operations. This is useful
6de9cd9a
DN
 3385 to avoid inlining functions that have too many of these. */
3386 case TRUNC_DIV_EXPR:
3387 case CEIL_DIV_EXPR:
3388 case FLOOR_DIV_EXPR:
3389 case ROUND_DIV_EXPR:
3390 case EXACT_DIV_EXPR:
3391 case TRUNC_MOD_EXPR:
3392 case CEIL_MOD_EXPR:
3393 case FLOOR_MOD_EXPR:
3394 case ROUND_MOD_EXPR:
3395 case RDIV_EXPR:
02f0b13a
JH
3396 if (TREE_CODE (op2) != INTEGER_CST)
3397 return weights->div_mod_cost;
3398 return 1;
726a989a
RB
3399
3400 default:
3401 /* We expect a copy assignment with no operator. */
3402 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3403 return 0;
3404 }
3405}
3406
3407
3408/* Estimate number of instructions that will be created by expanding
3409 the statements in the statement sequence STMTS.
3410 WEIGHTS contains weights attributed to various constructs. */
3411
3412static
3413int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3414{
3415 int cost;
3416 gimple_stmt_iterator gsi;
3417
3418 cost = 0;
3419 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3420 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3421
3422 return cost;
3423}
3424
3425
3426/* Estimate number of instructions that will be created by expanding STMT.
3427 WEIGHTS contains weights attributed to various constructs. */
3428
3429int
3430estimate_num_insns (gimple stmt, eni_weights *weights)
3431{
3432 unsigned cost, i;
3433 enum gimple_code code = gimple_code (stmt);
3434 tree lhs;
02f0b13a 3435 tree rhs;
726a989a
RB
3436
3437 switch (code)
3438 {
3439 case GIMPLE_ASSIGN:
 3440 /* Try to estimate the cost of assignments. We have two cases to
3441 deal with:
3442 1) Simple assignments to registers;
3443 2) Stores to things that must live in memory. This includes
3444 "normal" stores to scalars, but also assignments of large
3445 structures, or constructors of big arrays;
3446
 3447 Let us look at these two cases, assuming we have "a = b + C":
3448 <GIMPLE_ASSIGN <var_decl "a">
3449 <plus_expr <var_decl "b"> <constant C>>
3450 If "a" is a GIMPLE register, the assignment to it is free on almost
3451 any target, because "a" usually ends up in a real register. Hence
3452 the only cost of this expression comes from the PLUS_EXPR, and we
3453 can ignore the GIMPLE_ASSIGN.
3454 If "a" is not a GIMPLE register, the assignment to "a" will most
3455 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3456 of moving something into "a", which we compute using the function
3457 estimate_move_cost. */
3458 lhs = gimple_assign_lhs (stmt);
02f0b13a
JH
3459 rhs = gimple_assign_rhs1 (stmt);
3460
726a989a
RB
3461 if (is_gimple_reg (lhs))
3462 cost = 0;
3463 else
3464 cost = estimate_move_cost (TREE_TYPE (lhs));
3465
02f0b13a
JH
3466 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3467 cost += estimate_move_cost (TREE_TYPE (rhs));
3468
3469 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3470 gimple_assign_rhs1 (stmt),
3471 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3472 == GIMPLE_BINARY_RHS
3473 ? gimple_assign_rhs2 (stmt) : NULL);
726a989a
RB
3474 break;
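      /* Worked example: for "a = b + c" with a, b and c in registers,
	 the cost is 0 (free store) + 0 (register operands) + 1 for the
	 PLUS_EXPR. For "s.f = x / y" with s in memory and y not
	 constant, it is estimate_move_cost (TREE_TYPE (s.f)) plus
	 weights->div_mod_cost. */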
3475
3476 case GIMPLE_COND:
02f0b13a
JH
3477 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3478 gimple_op (stmt, 0),
3479 gimple_op (stmt, 1));
726a989a
RB
3480 break;
3481
3482 case GIMPLE_SWITCH:
3483 /* Take into account cost of the switch + guess 2 conditional jumps for
b8698a0f 3484 each case label.
726a989a
RB
3485
3486 TODO: once the switch expansion logic is sufficiently separated, we can
 3487 do a better job of estimating the cost of the switch. */
02f0b13a
JH
3488 if (weights->time_based)
3489 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3490 else
3491 cost = gimple_switch_num_labels (stmt) * 2;
6de9cd9a 3492 break;
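      /* Worked example: for gimple_switch_num_labels (stmt) == 16 this
	 gives floor_log2 (16) * 2 = 8 when estimating time (a balanced
	 decision tree), but 16 * 2 = 32 when estimating size. */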
726a989a
RB
3493
3494 case GIMPLE_CALL:
6de9cd9a 3495 {
726a989a 3496 tree decl = gimple_call_fndecl (stmt);
9f9ebcdf 3497 struct cgraph_node *node;
6de9cd9a 3498
9bb2f479
JH
 3499 /* Do not special-case builtins where we see the body.
 3500 That would just confuse the inliner. */
9f9ebcdf 3501 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
e9f7ad79 3502 ;
9bb2f479
JH
 3503 /* For builtins that are likely expanded to nothing or
 3504 inlined, do not account operand costs. */
3505 else if (is_simple_builtin (decl))
bec922f0
SL
3506 return 0;
3507 else if (is_inexpensive_builtin (decl))
9bb2f479 3508 return weights->target_builtin_call_cost;
e9f7ad79
RG
3509 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3510 {
3511 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3512 specialize the cheap expansion we do here.
3513 ??? This asks for a more general solution. */
3514 switch (DECL_FUNCTION_CODE (decl))
3515 {
3516 case BUILT_IN_POW:
3517 case BUILT_IN_POWF:
3518 case BUILT_IN_POWL:
3519 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3520 && REAL_VALUES_EQUAL
3521 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3522 return estimate_operator_cost (MULT_EXPR, weights,
3523 gimple_call_arg (stmt, 0),
3524 gimple_call_arg (stmt, 0));
3525 break;
3526
3527 default:
3528 break;
3529 }
3530 }
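	/* Worked example: with -ffast-math, "x * x" can reach this
	   point canonicalized as pow (x, 2.0); the case above charges
	   it as the single multiplication it will be expanded back
	   into (cost 1) instead of weights->call_cost. */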
b8698a0f 3531
e9f7ad79 3532 cost = weights->call_cost;
ff5d142c
RG
3533 if (gimple_call_lhs (stmt))
3534 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3535 for (i = 0; i < gimple_call_num_args (stmt); i++)
c7f599d0 3536 {
ff5d142c
RG
3537 tree arg = gimple_call_arg (stmt, i);
3538 cost += estimate_move_cost (TREE_TYPE (arg));
c7f599d0 3539 }
6de9cd9a
DN
3540 break;
3541 }
88f4034b 3542
9bb2f479
JH
3543 case GIMPLE_RETURN:
3544 return weights->return_cost;
3545
726a989a
RB
3546 case GIMPLE_GOTO:
3547 case GIMPLE_LABEL:
3548 case GIMPLE_NOP:
3549 case GIMPLE_PHI:
726a989a 3550 case GIMPLE_PREDICT:
b5b8b0ac 3551 case GIMPLE_DEBUG:
726a989a
RB
3552 return 0;
3553
3554 case GIMPLE_ASM:
2bd1d2c8 3555 return asm_str_count (gimple_asm_string (stmt));
726a989a 3556
1d65f45c
RH
3557 case GIMPLE_RESX:
3558 /* This is either going to be an external function call with one
3559 argument, or two register copy statements plus a goto. */
3560 return 2;
3561
3562 case GIMPLE_EH_DISPATCH:
3563 /* ??? This is going to turn into a switch statement. Ideally
3564 we'd have a look at the eh region and estimate the number of
3565 edges involved. */
3566 return 10;
3567
726a989a
RB
3568 case GIMPLE_BIND:
3569 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3570
3571 case GIMPLE_EH_FILTER:
3572 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3573
3574 case GIMPLE_CATCH:
3575 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3576
3577 case GIMPLE_TRY:
3578 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3579 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3580
3581 /* OpenMP directives are generally very expensive. */
3582
3583 case GIMPLE_OMP_RETURN:
3584 case GIMPLE_OMP_SECTIONS_SWITCH:
3585 case GIMPLE_OMP_ATOMIC_STORE:
3586 case GIMPLE_OMP_CONTINUE:
3587 /* ...except these, which are cheap. */
3588 return 0;
3589
3590 case GIMPLE_OMP_ATOMIC_LOAD:
3591 return weights->omp_cost;
3592
3593 case GIMPLE_OMP_FOR:
3594 return (weights->omp_cost
3595 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3596 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3597
3598 case GIMPLE_OMP_PARALLEL:
3599 case GIMPLE_OMP_TASK:
3600 case GIMPLE_OMP_CRITICAL:
3601 case GIMPLE_OMP_MASTER:
3602 case GIMPLE_OMP_ORDERED:
3603 case GIMPLE_OMP_SECTION:
3604 case GIMPLE_OMP_SECTIONS:
3605 case GIMPLE_OMP_SINGLE:
3606 return (weights->omp_cost
3607 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 3608
0a35513e
AH
3609 case GIMPLE_TRANSACTION:
3610 return (weights->tm_cost
3611 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3612 weights));
3613
6de9cd9a 3614 default:
1e128c5f 3615 gcc_unreachable ();
6de9cd9a 3616 }
726a989a
RB
3617
3618 return cost;
6de9cd9a
DN
3619}
3620
726a989a
RB
3621/* Estimate number of instructions that will be created by expanding
3622 function FNDECL. WEIGHTS contains weights attributed to various
3623 constructs. */
aa4a53af 3624
6de9cd9a 3625int
726a989a 3626estimate_num_insns_fn (tree fndecl, eni_weights *weights)
6de9cd9a 3627{
726a989a
RB
3628 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3629 gimple_stmt_iterator bsi;
e21aff8a 3630 basic_block bb;
726a989a 3631 int n = 0;
e21aff8a 3632
726a989a
RB
3633 gcc_assert (my_function && my_function->cfg);
3634 FOR_EACH_BB_FN (bb, my_function)
e21aff8a 3635 {
726a989a
RB
3636 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3637 n += estimate_num_insns (gsi_stmt (bsi), weights);
e21aff8a 3638 }
e21aff8a 3639
726a989a 3640 return n;
7f9bc51b
ZD
3641}
3642
726a989a 3643
7f9bc51b
ZD
3644/* Initializes weights used by estimate_num_insns. */
3645
3646void
3647init_inline_once (void)
3648{
7f9bc51b 3649 eni_size_weights.call_cost = 1;
625a2efb 3650 eni_size_weights.target_builtin_call_cost = 1;
7f9bc51b 3651 eni_size_weights.div_mod_cost = 1;
7f9bc51b 3652 eni_size_weights.omp_cost = 40;
0a35513e 3653 eni_size_weights.tm_cost = 10;
02f0b13a 3654 eni_size_weights.time_based = false;
9bb2f479 3655 eni_size_weights.return_cost = 1;
7f9bc51b
ZD
3656
 3657 /* Estimating the time for a call is difficult, since we have no idea what the
3658 called function does. In the current uses of eni_time_weights,
3659 underestimating the cost does less harm than overestimating it, so
ea2c620c 3660 we choose a rather small value here. */
7f9bc51b 3661 eni_time_weights.call_cost = 10;
9bb2f479 3662 eni_time_weights.target_builtin_call_cost = 1;
7f9bc51b 3663 eni_time_weights.div_mod_cost = 10;
7f9bc51b 3664 eni_time_weights.omp_cost = 40;
0a35513e 3665 eni_time_weights.tm_cost = 40;
02f0b13a 3666 eni_time_weights.time_based = true;
9bb2f479 3667 eni_time_weights.return_cost = 2;
6de9cd9a
DN
3668}
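/* Usage sketch (illustrative): the two weight sets initialized above
   drive the two metrics, e.g.

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);

   so the same call statement contributes 1 to the size estimate but
   10 to the time estimate. */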
3669
726a989a
RB
3670/* Estimate the number of instructions in a gimple_seq. */
3671
3672int
3673count_insns_seq (gimple_seq seq, eni_weights *weights)
3674{
3675 gimple_stmt_iterator gsi;
3676 int n = 0;
3677 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3678 n += estimate_num_insns (gsi_stmt (gsi), weights);
3679
3680 return n;
3681}
3682
3683
e21aff8a 3684/* Install a new lexical TREE_BLOCK underneath 'current_block'. */
726a989a 3685
e21aff8a 3686static void
4a283090 3687prepend_lexical_block (tree current_block, tree new_block)
e21aff8a 3688{
4a283090
JH
3689 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3690 BLOCK_SUBBLOCKS (current_block) = new_block;
e21aff8a 3691 BLOCK_SUPERCONTEXT (new_block) = current_block;
e21aff8a
SB
3692}
3693
c021f10b
NF
3694/* Add local variables from CALLEE to CALLER. */
3695
3696static inline void
3697add_local_variables (struct function *callee, struct function *caller,
3698 copy_body_data *id, bool check_var_ann)
3699{
3700 tree var;
3701 unsigned ix;
3702
3703 FOR_EACH_LOCAL_DECL (callee, ix, var)
3704 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3705 {
3706 if (!check_var_ann
3707 || (var_ann (var) && add_referenced_var (var)))
3708 add_local_decl (caller, var);
3709 }
3710 else if (!can_be_nonlocal (var, id))
42694189
JJ
3711 {
3712 tree new_var = remap_decl (var, id);
3713
3714 /* Remap debug-expressions. */
3715 if (TREE_CODE (new_var) == VAR_DECL
3716 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3717 && new_var != var)
3718 {
3719 tree tem = DECL_DEBUG_EXPR (var);
3720 bool old_regimplify = id->regimplify;
3721 id->remapping_type_depth++;
3722 walk_tree (&tem, copy_tree_body_r, id, NULL);
3723 id->remapping_type_depth--;
3724 id->regimplify = old_regimplify;
3725 SET_DECL_DEBUG_EXPR (new_var, tem);
3726 }
3727 add_local_decl (caller, new_var);
3728 }
c021f10b
NF
3729}
3730
726a989a 3731/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 3732
e21aff8a 3733static bool
726a989a 3734expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
d4e4baa9 3735{
0f900dfa 3736 tree use_retvar;
d436bff8 3737 tree fn;
b5b8b0ac 3738 struct pointer_map_t *st, *dst;
110cfe1c 3739 tree return_slot;
7740f00d 3740 tree modify_dest;
6de9cd9a 3741 location_t saved_location;
e21aff8a 3742 struct cgraph_edge *cg_edge;
61a05df1 3743 cgraph_inline_failed_t reason;
e21aff8a
SB
3744 basic_block return_block;
3745 edge e;
726a989a 3746 gimple_stmt_iterator gsi, stmt_gsi;
e21aff8a 3747 bool successfully_inlined = FALSE;
4f6c2131 3748 bool purge_dead_abnormal_edges;
d4e4baa9 3749
6de9cd9a
DN
3750 /* Set input_location here so we get the right instantiation context
3751 if we call instantiate_decl from inlinable_function_p. */
3752 saved_location = input_location;
726a989a
RB
3753 if (gimple_has_location (stmt))
3754 input_location = gimple_location (stmt);
6de9cd9a 3755
d4e4baa9 3756 /* From here on, we're only interested in CALL_EXPRs. */
726a989a 3757 if (gimple_code (stmt) != GIMPLE_CALL)
6de9cd9a 3758 goto egress;
d4e4baa9 3759
db09f943
MJ
3760 cg_edge = cgraph_edge (id->dst_node, stmt);
3761 gcc_checking_assert (cg_edge);
d4e4baa9
AO
3762 /* First, see if we can figure out what function is being called.
3763 If we cannot, then there is no hope of inlining the function. */
db09f943 3764 if (cg_edge->indirect_unknown_callee)
3949c4a7 3765 goto egress;
db09f943
MJ
3766 fn = cg_edge->callee->decl;
3767 gcc_checking_assert (fn);
b58b1157 3768
726a989a 3769 /* If FN is a declaration of a function in a nested scope that was
a1a0fd4e
AO
3770 globally declared inline, we don't set its DECL_INITIAL.
3771 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3772 C++ front-end uses it for cdtors to refer to their internal
 3773 declarations, which are not real functions. Fortunately those
3774 don't have trees to be saved, so we can tell by checking their
726a989a
RB
3775 gimple_body. */
3776 if (!DECL_INITIAL (fn)
a1a0fd4e 3777 && DECL_ABSTRACT_ORIGIN (fn)
39ecc018 3778 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
a1a0fd4e
AO
3779 fn = DECL_ABSTRACT_ORIGIN (fn);
3780
8f4f502f 3781 /* Don't try to inline functions that are not well-suited to inlining. */
e21aff8a 3782 if (!cgraph_inline_p (cg_edge, &reason))
a833faa5 3783 {
3e293154
MJ
3784 /* If this call was originally indirect, we do not want to emit any
3785 inlining related warnings or sorry messages because there are no
3786 guarantees regarding those. */
e33c6cd6 3787 if (cg_edge->indirect_inlining_edge)
3e293154
MJ
3788 goto egress;
3789
7fac66d4
JH
3790 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3791 /* Avoid warnings during early inline pass. */
c9fc06dc
CB
3792 && cgraph_global_info_ready
3793 /* PR 20090218-1_0.c. Body can be provided by another module. */
3794 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
2d327012 3795 {
c9fc06dc
CB
3796 error ("inlining failed in call to always_inline %q+F: %s", fn,
3797 cgraph_inline_failed_string (reason));
3798 error ("called from here");
2d327012 3799 }
ff7037dc
EB
3800 else if (warn_inline
3801 && DECL_DECLARED_INLINE_P (fn)
3802 && !DECL_NO_INLINE_WARNING_P (fn)
2d327012 3803 && !DECL_IN_SYSTEM_HEADER (fn)
61a05df1 3804 && reason != CIF_UNSPECIFIED
d63db217 3805 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
d7d1d041
RG
3806 /* Do not warn about not inlined recursive calls. */
3807 && !cgraph_edge_recursive_p (cg_edge)
d63db217 3808 /* Avoid warnings during early inline pass. */
7e8b322a 3809 && cgraph_global_info_ready)
a833faa5 3810 {
dee15844 3811 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
49c8958b 3812 fn, _(cgraph_inline_failed_string (reason)));
3176a0c2 3813 warning (OPT_Winline, "called from here");
a833faa5 3814 }
6de9cd9a 3815 goto egress;
a833faa5 3816 }
ea99e0be 3817 fn = cg_edge->callee->decl;
d4e4baa9 3818
18c6ada9 3819#ifdef ENABLE_CHECKING
1b369fae 3820 if (cg_edge->callee->decl != id->dst_node->decl)
e21aff8a 3821 verify_cgraph_node (cg_edge->callee);
18c6ada9
JH
3822#endif
3823
e21aff8a 3824 /* We will be inlining this callee. */
1d65f45c 3825 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
e21aff8a 3826
f9417da1
RG
3827 /* Update the callers EH personality. */
3828 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3829 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3830 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3831
726a989a 3832 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
3833 e = split_block (bb, stmt);
3834 bb = e->src;
3835 return_block = e->dest;
3836 remove_edge (e);
3837
4f6c2131
EB
3838 /* split_block splits after the statement; work around this by
3839 moving the call into the second block manually. Not pretty,
3840 but seems easier than doing the CFG manipulation by hand
726a989a
RB
3841 when the GIMPLE_CALL is in the last statement of BB. */
3842 stmt_gsi = gsi_last_bb (bb);
3843 gsi_remove (&stmt_gsi, false);
4f6c2131 3844
726a989a 3845 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4f6c2131
EB
3846 been the source of abnormal edges. In this case, schedule
3847 the removal of dead abnormal edges. */
726a989a
RB
3848 gsi = gsi_start_bb (return_block);
3849 if (gsi_end_p (gsi))
e21aff8a 3850 {
726a989a 3851 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 3852 purge_dead_abnormal_edges = true;
e21aff8a 3853 }
4f6c2131
EB
3854 else
3855 {
726a989a 3856 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
3857 purge_dead_abnormal_edges = false;
3858 }
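  /* A sketch of the surgery so far, with S denoting the call statement:

       before split_block:  BB: ... S ...
       after split_block:   BB: ... S    ->  return_block: ...
       after moving S:      BB: ...      ->  return_block: S ...

     The duplicated callee body is stitched in between BB and
     return_block below, and S is then replaced by an assignment from
     the return variable, or removed. */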
3859
726a989a 3860 stmt_gsi = gsi_start_bb (return_block);
742a37d5 3861
d436bff8
AH
3862 /* Build a block containing code to initialize the arguments, the
3863 actual inline expansion of the body, and a label for the return
3864 statements within the function to jump to. The type of the
3865 statement expression is the return type of the function call. */
e21aff8a
SB
3866 id->block = make_node (BLOCK);
3867 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3e2844cb 3868 BLOCK_SOURCE_LOCATION (id->block) = input_location;
4a283090 3869 prepend_lexical_block (gimple_block (stmt), id->block);
e21aff8a 3870
d4e4baa9
AO
3871 /* Local declarations will be replaced by their equivalents in this
3872 map. */
3873 st = id->decl_map;
6be42dd4 3874 id->decl_map = pointer_map_create ();
b5b8b0ac
AO
3875 dst = id->debug_map;
3876 id->debug_map = NULL;
d4e4baa9 3877
e21aff8a 3878 /* Record the function we are about to inline. */
1b369fae
RH
3879 id->src_fn = fn;
3880 id->src_node = cg_edge->callee;
110cfe1c 3881 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
726a989a 3882 id->gimple_call = stmt;
1b369fae 3883
3c8da8a5
AO
3884 gcc_assert (!id->src_cfun->after_inlining);
3885
045685a9 3886 id->entry_bb = bb;
7299cb99
JH
3887 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3888 {
3889 gimple_stmt_iterator si = gsi_last_bb (bb);
3890 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3891 NOT_TAKEN),
3892 GSI_NEW_STMT);
3893 }
726a989a 3894 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 3895
ea99e0be 3896 if (DECL_INITIAL (fn))
4a283090 3897 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
acb8f212 3898
d4e4baa9
AO
3899 /* Return statements in the function body will be replaced by jumps
3900 to the RET_LABEL. */
1e128c5f
GB
3901 gcc_assert (DECL_INITIAL (fn));
3902 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 3903
726a989a 3904 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 3905 return_slot = NULL;
726a989a 3906 if (gimple_call_lhs (stmt))
81bafd36 3907 {
726a989a 3908 modify_dest = gimple_call_lhs (stmt);
81bafd36
ILT
3909
3910 /* The function which we are inlining might not return a value,
3911 in which case we should issue a warning that the function
3912 does not return a value. In that case the optimizers will
3913 see that the variable to which the value is assigned was not
3914 initialized. We do not want to issue a warning about that
3915 uninitialized variable. */
3916 if (DECL_P (modify_dest))
3917 TREE_NO_WARNING (modify_dest) = 1;
726a989a
RB
3918
3919 if (gimple_call_return_slot_opt_p (stmt))
fa47911c 3920 {
110cfe1c 3921 return_slot = modify_dest;
fa47911c
JM
3922 modify_dest = NULL;
3923 }
81bafd36 3924 }
7740f00d
RH
3925 else
3926 modify_dest = NULL;
3927
1ea193c2
ILT
3928 /* If we are inlining a call to the C++ operator new, we don't want
3929 to use type based alias analysis on the return value. Otherwise
3930 we may get confused if the compiler sees that the inlined new
3931 function returns a pointer which was just deleted. See bug
3932 33407. */
3933 if (DECL_IS_OPERATOR_NEW (fn))
3934 {
3935 return_slot = NULL;
3936 modify_dest = NULL;
3937 }
3938
d4e4baa9 3939 /* Declare the return variable for the function. */
6938f93f 3940 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
1ea193c2 3941
acb8f212 3942 /* Add local vars in this inlined callee to caller. */
c021f10b 3943 add_local_variables (id->src_cfun, cfun, id, true);
acb8f212 3944
0d63a740
JH
3945 if (dump_file && (dump_flags & TDF_DETAILS))
3946 {
3947 fprintf (dump_file, "Inlining ");
b8698a0f 3948 print_generic_expr (dump_file, id->src_fn, 0);
0d63a740 3949 fprintf (dump_file, " to ");
b8698a0f 3950 print_generic_expr (dump_file, id->dst_fn, 0);
0d63a740
JH
3951 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3952 }
3953
eb50f5f4
JH
3954 /* This is it. Duplicate the callee body. Assume callee is
3955 pre-gimplified. Note that we must not alter the caller
3956 function in any way before this point, as this CALL_EXPR may be
3957 a self-referential call; if we're calling ourselves, we need to
3958 duplicate our body before altering anything. */
0d63a740
JH
3959 copy_body (id, bb->count,
3960 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
91382288 3961 bb, return_block, NULL, NULL);
eb50f5f4 3962
d086d311 3963 /* Reset the escaped solution. */
6b8ed145 3964 if (cfun->gimple_df)
d086d311 3965 pt_solution_reset (&cfun->gimple_df->escaped);
6b8ed145 3966
d4e4baa9 3967 /* Clean up. */
b5b8b0ac
AO
3968 if (id->debug_map)
3969 {
3970 pointer_map_destroy (id->debug_map);
3971 id->debug_map = dst;
3972 }
6be42dd4 3973 pointer_map_destroy (id->decl_map);
d4e4baa9
AO
3974 id->decl_map = st;
3975
5006671f
RG
 3976 /* Unlink the call's virtual operands before replacing it. */
3977 unlink_stmt_vdef (stmt);
3978
84936f6f 3979 /* If the inlined function returns a result that we care about,
726a989a
RB
3980 substitute the GIMPLE_CALL with an assignment of the return
3981 variable to the LHS of the call. That is, if STMT was
3982 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3983 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 3984 {
726a989a
RB
3985 gimple old_stmt = stmt;
3986 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3987 gsi_replace (&stmt_gsi, stmt, false);
110cfe1c 3988 if (gimple_in_ssa_p (cfun))
5006671f 3989 mark_symbols_for_renaming (stmt);
726a989a 3990 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e21aff8a 3991 }
6de9cd9a 3992 else
110cfe1c 3993 {
726a989a
RB
3994 /* Handle the case of inlining a function with no return
3995 statement, which causes the return value to become undefined. */
3996 if (gimple_call_lhs (stmt)
3997 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 3998 {
726a989a
RB
3999 tree name = gimple_call_lhs (stmt);
4000 tree var = SSA_NAME_VAR (name);
110cfe1c
JH
4001 tree def = gimple_default_def (cfun, var);
4002
110cfe1c
JH
4003 if (def)
4004 {
726a989a
RB
4005 /* If the variable is used undefined, make this name
4006 undefined via a move. */
4007 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4008 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c 4009 }
110cfe1c
JH
4010 else
4011 {
726a989a
RB
4012 /* Otherwise make this variable undefined. */
4013 gsi_remove (&stmt_gsi, true);
110cfe1c 4014 set_default_def (var, name);
726a989a 4015 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
110cfe1c
JH
4016 }
4017 }
4018 else
726a989a 4019 gsi_remove (&stmt_gsi, true);
110cfe1c 4020 }
d4e4baa9 4021
4f6c2131 4022 if (purge_dead_abnormal_edges)
30fd5881
EB
4023 {
4024 gimple_purge_dead_eh_edges (return_block);
4025 gimple_purge_dead_abnormal_call_edges (return_block);
4026 }
84936f6f 4027
e21aff8a
SB
4028 /* If the value of the new expression is ignored, that's OK. We
4029 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4030 the equivalent inlined version either. */
726a989a
RB
4031 if (is_gimple_assign (stmt))
4032 {
4033 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 4034 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
726a989a
RB
4035 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4036 }
84936f6f 4037
1eb3331e
DB
4038 /* Output the inlining info for this abstract function, since it has been
4039 inlined. If we don't do this now, we can lose the information about the
4040 variables in the function when the blocks get blown away as soon as we
4041 remove the cgraph node. */
e21aff8a 4042 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 4043
e72fcfe8 4044 /* Update callgraph if needed. */
e21aff8a 4045 cgraph_remove_node (cg_edge->callee);
e72fcfe8 4046
e21aff8a 4047 id->block = NULL_TREE;
e21aff8a 4048 successfully_inlined = TRUE;
742a37d5 4049
6de9cd9a
DN
4050 egress:
4051 input_location = saved_location;
e21aff8a 4052 return successfully_inlined;
d4e4baa9 4053}
6de9cd9a 4054
e21aff8a
SB
4055/* Expand call statements reachable from STMT_P.
4056 We can only have CALL_EXPRs as the "toplevel" tree code or nested
0a35513e 4057 in a MODIFY_EXPR. */
e21aff8a
SB
4058
4059static bool
1b369fae 4060gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 4061{
726a989a 4062 gimple_stmt_iterator gsi;
6de9cd9a 4063
726a989a 4064 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 4065 {
726a989a 4066 gimple stmt = gsi_stmt (gsi);
e21aff8a 4067
726a989a
RB
4068 if (is_gimple_call (stmt)
4069 && expand_call_inline (bb, stmt, id))
4070 return true;
6de9cd9a 4071 }
726a989a 4072
e21aff8a 4073 return false;
6de9cd9a
DN
4074}
4075
726a989a 4076
b8a00a4d
JH
4077/* Walk all basic blocks created after FIRST and try to fold every statement
4078 in the STATEMENTS pointer set. */
726a989a 4079
b8a00a4d
JH
4080static void
4081fold_marked_statements (int first, struct pointer_set_t *statements)
4082{
726a989a 4083 for (; first < n_basic_blocks; first++)
b8a00a4d
JH
4084 if (BASIC_BLOCK (first))
4085 {
726a989a
RB
4086 gimple_stmt_iterator gsi;
4087
4088 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4089 !gsi_end_p (gsi);
4090 gsi_next (&gsi))
4091 if (pointer_set_contains (statements, gsi_stmt (gsi)))
9477eb38 4092 {
726a989a 4093 gimple old_stmt = gsi_stmt (gsi);
4b685e14 4094 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
2bafad93 4095
44e10129
MM
4096 if (old_decl && DECL_BUILT_IN (old_decl))
4097 {
4098 /* Folding builtins can create multiple instructions,
4099 we need to look at all of them. */
4100 gimple_stmt_iterator i2 = gsi;
4101 gsi_prev (&i2);
4102 if (fold_stmt (&gsi))
4103 {
4104 gimple new_stmt;
a9d24544
JJ
4105 /* If a builtin at the end of a bb folded into nothing,
4106 the following loop won't work. */
4107 if (gsi_end_p (gsi))
4108 {
4109 cgraph_update_edges_for_call_stmt (old_stmt,
4110 old_decl, NULL);
4111 break;
4112 }
44e10129
MM
4113 if (gsi_end_p (i2))
4114 i2 = gsi_start_bb (BASIC_BLOCK (first));
4115 else
4116 gsi_next (&i2);
4117 while (1)
4118 {
4119 new_stmt = gsi_stmt (i2);
4120 update_stmt (new_stmt);
4121 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4122 new_stmt);
4123
4124 if (new_stmt == gsi_stmt (gsi))
4125 {
4126 /* It is okay to check only for the very last
4127 of these statements. If it is a throwing
 4128 statement nothing will change. If it isn't,
 4129 this can remove EH edges. The only case in
 4130 which this could be wrong is if intermediate
 4131 stmts threw but the last one did not; that
 4132 would mean splitting the block, which we
 4133 can't do here and we'd lose anyway. And as
 4134 builtins probably never throw, this all
 4135 is moot anyway. */
4136 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4137 new_stmt))
4138 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4139 break;
4140 }
4141 gsi_next (&i2);
4142 }
4143 }
4144 }
4145 else if (fold_stmt (&gsi))
9477eb38 4146 {
726a989a
RB
4147 /* Re-read the statement from GSI as fold_stmt() may
4148 have changed it. */
4149 gimple new_stmt = gsi_stmt (gsi);
4150 update_stmt (new_stmt);
4151
4b685e14
JH
4152 if (is_gimple_call (old_stmt)
4153 || is_gimple_call (new_stmt))
44e10129
MM
4154 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4155 new_stmt);
726a989a
RB
4156
4157 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4158 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
9477eb38
JH
4159 }
4160 }
b8a00a4d
JH
4161 }
4162}
4163
1084e689
JH
4164/* Return true if BB has at least one abnormal outgoing edge. */
4165
4166static inline bool
4167has_abnormal_outgoing_edge_p (basic_block bb)
4168{
4169 edge e;
4170 edge_iterator ei;
4171
4172 FOR_EACH_EDGE (e, ei, bb->succs)
4173 if (e->flags & EDGE_ABNORMAL)
4174 return true;
4175
4176 return false;
4177}
4178
d4e4baa9
AO
4179/* Expand calls to inline functions in the body of FN. */
4180
873aa8f5 4181unsigned int
46c5ad27 4182optimize_inline_calls (tree fn)
d4e4baa9 4183{
1b369fae 4184 copy_body_data id;
e21aff8a 4185 basic_block bb;
b8a00a4d 4186 int last = n_basic_blocks;
d406b663 4187 struct gimplify_ctx gctx;
5d7b099c 4188 bool inlined_p = false;
d406b663 4189
c5b6f18e
MM
4190 /* There is no point in performing inlining if errors have already
4191 occurred -- and we might crash if we try to inline invalid
4192 code. */
1da2ed5f 4193 if (seen_error ())
873aa8f5 4194 return 0;
c5b6f18e 4195
d4e4baa9
AO
4196 /* Clear out ID. */
4197 memset (&id, 0, sizeof (id));
4198
581985d7 4199 id.src_node = id.dst_node = cgraph_get_node (fn);
322dd859 4200 gcc_assert (id.dst_node->analyzed);
1b369fae 4201 id.dst_fn = fn;
d4e4baa9 4202 /* Or any functions that aren't finished yet. */
d4e4baa9 4203 if (current_function_decl)
0f900dfa 4204 id.dst_fn = current_function_decl;
1b369fae
RH
4205
4206 id.copy_decl = copy_decl_maybe_to_var;
4207 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4208 id.transform_new_cfg = false;
4209 id.transform_return_to_modify = true;
9ff420f1 4210 id.transform_lang_insert_block = NULL;
b8a00a4d 4211 id.statements_to_fold = pointer_set_create ();
1b369fae 4212
d406b663 4213 push_gimplify_context (&gctx);
d4e4baa9 4214
672987e8
ZD
4215 /* We make no attempts to keep dominance info up-to-date. */
4216 free_dominance_info (CDI_DOMINATORS);
4217 free_dominance_info (CDI_POST_DOMINATORS);
4218
726a989a
RB
4219 /* Register specific gimple functions. */
4220 gimple_register_cfg_hooks ();
4221
e21aff8a
SB
4222 /* Reach the trees by walking over the CFG, and note the
4223 enclosing basic-blocks in the call edges. */
4224 /* We walk the blocks going forward, because inlined function bodies
4225 will split id->current_basic_block, and the new blocks will
4226 follow it; we'll trudge through them, processing their CALL_EXPRs
4227 along the way. */
4228 FOR_EACH_BB (bb)
5d7b099c 4229 inlined_p |= gimple_expand_calls_inline (bb, &id);
d4e4baa9 4230
e21aff8a 4231 pop_gimplify_context (NULL);
6de9cd9a 4232
18c6ada9
JH
4233#ifdef ENABLE_CHECKING
4234 {
4235 struct cgraph_edge *e;
4236
1b369fae 4237 verify_cgraph_node (id.dst_node);
18c6ada9
JH
4238
4239 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 4240 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 4241 gcc_assert (e->inline_failed);
18c6ada9
JH
4242 }
4243#endif
b8698a0f 4244
5d7b099c 4245 /* Fold queued statements. */
a9eafe81
AP
4246 fold_marked_statements (last, id.statements_to_fold);
4247 pointer_set_destroy (id.statements_to_fold);
b8698a0f 4248
b5b8b0ac
AO
4249 gcc_assert (!id.debug_stmts);
4250
5d7b099c
RG
4251 /* If we didn't inline into the function there is nothing to do. */
4252 if (!inlined_p)
4253 return 0;
4254
a9eafe81
AP
4255 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4256 number_blocks (fn);
b8a00a4d 4257
078c3644
JH
4258 delete_unreachable_blocks_update_callgraph (&id);
4259#ifdef ENABLE_CHECKING
4260 verify_cgraph_node (id.dst_node);
4261#endif
726a989a 4262
110cfe1c
JH
4263 /* It would be nice to check SSA/CFG/statement consistency here, but it is
 4264 not possible yet - the IPA passes might make various functions not
 4265 throw, and they don't care to proactively update local EH info. This is
 4266 done later in the fixup_cfg pass, which also executes the verification. */
726a989a
RB
4267 return (TODO_update_ssa
4268 | TODO_cleanup_cfg
45a80bb9 4269 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5d7b099c 4270 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
45a80bb9 4271 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
d4e4baa9
AO
4272}
4273
d4e4baa9
AO
4274/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4275
4276tree
46c5ad27 4277copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
d4e4baa9
AO
4278{
4279 enum tree_code code = TREE_CODE (*tp);
07beea0d 4280 enum tree_code_class cl = TREE_CODE_CLASS (code);
d4e4baa9
AO
4281
4282 /* We make copies of most nodes. */
07beea0d 4283 if (IS_EXPR_CODE_CLASS (cl)
d4e4baa9
AO
4284 || code == TREE_LIST
4285 || code == TREE_VEC
8843c120
DN
4286 || code == TYPE_DECL
4287 || code == OMP_CLAUSE)
d4e4baa9
AO
4288 {
4289 /* Because the chain gets clobbered when we make a copy, we save it
4290 here. */
82d6e6fc 4291 tree chain = NULL_TREE, new_tree;
07beea0d 4292
81f653d6
NF
4293 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4294 chain = TREE_CHAIN (*tp);
d4e4baa9
AO
4295
4296 /* Copy the node. */
82d6e6fc 4297 new_tree = copy_node (*tp);
6de9cd9a
DN
4298
4299 /* Propagate mudflap marked-ness. */
4300 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 4301 mf_mark (new_tree);
6de9cd9a 4302
82d6e6fc 4303 *tp = new_tree;
d4e4baa9
AO
4304
4305 /* Now, restore the chain, if appropriate. That will cause
4306 walk_tree to walk into the chain as well. */
50674e96
DN
4307 if (code == PARM_DECL
4308 || code == TREE_LIST
aaf46ef9 4309 || code == OMP_CLAUSE)
d4e4baa9
AO
4310 TREE_CHAIN (*tp) = chain;
4311
4312 /* For now, we don't update BLOCKs when we make copies. So, we
6de9cd9a
DN
4313 have to nullify all BIND_EXPRs. */
4314 if (TREE_CODE (*tp) == BIND_EXPR)
4315 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 4316 }
4038c495
GB
4317 else if (code == CONSTRUCTOR)
4318 {
4319 /* CONSTRUCTOR nodes need special handling because
4320 we need to duplicate the vector of elements. */
82d6e6fc 4321 tree new_tree;
4038c495 4322
82d6e6fc 4323 new_tree = copy_node (*tp);
4038c495
GB
4324
4325 /* Propagate mudflap marked-ness. */
4326 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 4327 mf_mark (new_tree);
9f63daea 4328
82d6e6fc 4329 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4038c495 4330 CONSTRUCTOR_ELTS (*tp));
82d6e6fc 4331 *tp = new_tree;
4038c495 4332 }
3533b943 4333 else if (code == STATEMENT_LIST)
deb5046b
JM
4334 /* We used to just abort on STATEMENT_LIST, but we can run into them
4335 with statement-expressions (c++/40975). */
4336 copy_statement_list (tp);
6615c446 4337 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 4338 *walk_subtrees = 0;
6615c446 4339 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 4340 *walk_subtrees = 0;
a396f8ae
GK
4341 else if (TREE_CODE_CLASS (code) == tcc_constant)
4342 *walk_subtrees = 0;
d4e4baa9
AO
4343 return NULL_TREE;
4344}
4345
4346/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
aa4a53af 4347 information indicating to what new SAVE_EXPR this one should be mapped,
e21aff8a
SB
 4348 use that one. Otherwise, create a new node and enter it in ST so that
 4349 later copies of the same SAVE_EXPR map to the same new node. */
d4e4baa9 4350
892c7e1e 4351static void
82c82743 4352remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
d4e4baa9 4353{
6be42dd4
RG
4354 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4355 tree *n;
5e20bdd7 4356 tree t;
d4e4baa9
AO
4357
4358 /* See if we already encountered this SAVE_EXPR. */
6be42dd4 4359 n = (tree *) pointer_map_contains (st, *tp);
d92b4486 4360
d4e4baa9
AO
4361 /* If we didn't already remap this SAVE_EXPR, do so now. */
4362 if (!n)
4363 {
5e20bdd7 4364 t = copy_node (*tp);
d4e4baa9 4365
d4e4baa9 4366 /* Remember this SAVE_EXPR. */
6be42dd4 4367 *pointer_map_insert (st, *tp) = t;
350ebd54 4368 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
6be42dd4 4369 *pointer_map_insert (st, t) = t;
d4e4baa9
AO
4370 }
4371 else
5e20bdd7
JZ
4372 {
4373 /* We've already walked into this SAVE_EXPR; don't do it again. */
4374 *walk_subtrees = 0;
6be42dd4 4375 t = *n;
5e20bdd7 4376 }
d4e4baa9
AO
4377
4378 /* Replace this SAVE_EXPR with the copy. */
5e20bdd7 4379 *tp = t;
d4e4baa9 4380}
d436bff8 4381
aa4a53af
RK
4382/* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
4383 copies the declaration and enters it in the splay_tree in DATA (which is
1b369fae 4384 really a `copy_body_data *'). */
6de9cd9a
DN
4385
4386static tree
4387mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4388 void *data)
4389{
1b369fae 4390 copy_body_data *id = (copy_body_data *) data;
6de9cd9a
DN
4391
4392 /* Don't walk into types. */
350fae66
RK
4393 if (TYPE_P (*tp))
4394 *walk_subtrees = 0;
6de9cd9a 4395
350fae66 4396 else if (TREE_CODE (*tp) == LABEL_EXPR)
6de9cd9a 4397 {
350fae66 4398 tree decl = TREE_OPERAND (*tp, 0);
6de9cd9a 4399
350fae66 4400 /* Copy the decl and remember the copy. */
1b369fae 4401 insert_decl_map (id, decl, id->copy_decl (decl, id));
6de9cd9a
DN
4402 }
4403
4404 return NULL_TREE;
4405}
4406
19114537
EC
4407/* Perform any modifications to EXPR required when it is unsaved. Does
4408 not recurse into EXPR's subtrees. */
4409
4410static void
4411unsave_expr_1 (tree expr)
4412{
4413 switch (TREE_CODE (expr))
4414 {
4415 case TARGET_EXPR:
4416 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4417 It's OK for this to happen if it was part of a subtree that
4418 isn't immediately expanded, such as operand 2 of another
4419 TARGET_EXPR. */
4420 if (TREE_OPERAND (expr, 1))
4421 break;
4422
4423 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4424 TREE_OPERAND (expr, 3) = NULL_TREE;
4425 break;
4426
4427 default:
4428 break;
4429 }
4430}
4431
6de9cd9a
DN
4432/* Called via walk_tree when an expression is unsaved. Using the
 4433 map pointed to by ST (which is really a pointer_map_t),
4434 remaps all local declarations to appropriate replacements. */
d436bff8
AH
4435
4436static tree
6de9cd9a 4437unsave_r (tree *tp, int *walk_subtrees, void *data)
d436bff8 4438{
1b369fae 4439 copy_body_data *id = (copy_body_data *) data;
6be42dd4
RG
4440 struct pointer_map_t *st = id->decl_map;
4441 tree *n;
6de9cd9a
DN
4442
4443 /* Only a local declaration (variable or label). */
4444 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4445 || TREE_CODE (*tp) == LABEL_DECL)
4446 {
4447 /* Lookup the declaration. */
6be42dd4 4448 n = (tree *) pointer_map_contains (st, *tp);
9f63daea 4449
6de9cd9a
DN
4450 /* If it's there, remap it. */
4451 if (n)
6be42dd4 4452 *tp = *n;
6de9cd9a 4453 }
aa4a53af 4454
6de9cd9a 4455 else if (TREE_CODE (*tp) == STATEMENT_LIST)
726a989a 4456 gcc_unreachable ();
6de9cd9a
DN
4457 else if (TREE_CODE (*tp) == BIND_EXPR)
4458 copy_bind_expr (tp, walk_subtrees, id);
a406865a
RG
4459 else if (TREE_CODE (*tp) == SAVE_EXPR
4460 || TREE_CODE (*tp) == TARGET_EXPR)
82c82743 4461 remap_save_expr (tp, st, walk_subtrees);
d436bff8 4462 else
6de9cd9a
DN
4463 {
4464 copy_tree_r (tp, walk_subtrees, NULL);
4465
4466 /* Do whatever unsaving is required. */
4467 unsave_expr_1 (*tp);
4468 }
4469
4470 /* Keep iterating. */
4471 return NULL_TREE;
d436bff8
AH
4472}
4473
19114537
EC
4474/* Copies everything in EXPR and replaces variables, labels
4475 and SAVE_EXPRs local to EXPR. */
6de9cd9a
DN
4476
4477tree
19114537 4478unsave_expr_now (tree expr)
6de9cd9a 4479{
1b369fae 4480 copy_body_data id;
6de9cd9a
DN
4481
4482 /* There's nothing to do for NULL_TREE. */
4483 if (expr == 0)
4484 return expr;
4485
4486 /* Set up ID. */
4487 memset (&id, 0, sizeof (id));
1b369fae
RH
4488 id.src_fn = current_function_decl;
4489 id.dst_fn = current_function_decl;
6be42dd4 4490 id.decl_map = pointer_map_create ();
b5b8b0ac 4491 id.debug_map = NULL;
6de9cd9a 4492
1b369fae
RH
4493 id.copy_decl = copy_decl_no_change;
4494 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4495 id.transform_new_cfg = false;
4496 id.transform_return_to_modify = false;
9ff420f1 4497 id.transform_lang_insert_block = NULL;
1b369fae 4498
6de9cd9a
DN
4499 /* Walk the tree once to find local labels. */
4500 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4501
4502 /* Walk the tree again, copying, remapping, and unsaving. */
4503 walk_tree (&expr, unsave_r, &id, NULL);
4504
4505 /* Clean up. */
6be42dd4 4506 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
4507 if (id.debug_map)
4508 pointer_map_destroy (id.debug_map);
6de9cd9a
DN
4509
4510 return expr;
4511}
4512
726a989a
RB
4513/* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4514 label, copies the declaration and enters it in the splay_tree in DATA (which
 4515 is really a 'copy_body_data *'). */
4516
4517static tree
4518mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4519 bool *handled_ops_p ATTRIBUTE_UNUSED,
4520 struct walk_stmt_info *wi)
4521{
4522 copy_body_data *id = (copy_body_data *) wi->info;
4523 gimple stmt = gsi_stmt (*gsip);
4524
4525 if (gimple_code (stmt) == GIMPLE_LABEL)
4526 {
4527 tree decl = gimple_label_label (stmt);
4528
4529 /* Copy the decl and remember the copy. */
4530 insert_decl_map (id, decl, id->copy_decl (decl, id));
4531 }
4532
4533 return NULL_TREE;
4534}
4535
4536
 4537/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
 4538 Using the map pointed to by ST (which is really a pointer_map_t),
4539 remaps all local declarations to appropriate replacements in gimple
4540 operands. */
4541
4542static tree
4543replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4544{
4545 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4546 copy_body_data *id = (copy_body_data *) wi->info;
4547 struct pointer_map_t *st = id->decl_map;
4548 tree *n;
4549 tree expr = *tp;
4550
4551 /* Only a local declaration (variable or label). */
4552 if ((TREE_CODE (expr) == VAR_DECL
4553 && !TREE_STATIC (expr))
4554 || TREE_CODE (expr) == LABEL_DECL)
4555 {
4556 /* Lookup the declaration. */
4557 n = (tree *) pointer_map_contains (st, expr);
4558
4559 /* If it's there, remap it. */
4560 if (n)
4561 *tp = *n;
4562 *walk_subtrees = 0;
4563 }
4564 else if (TREE_CODE (expr) == STATEMENT_LIST
4565 || TREE_CODE (expr) == BIND_EXPR
4566 || TREE_CODE (expr) == SAVE_EXPR)
4567 gcc_unreachable ();
4568 else if (TREE_CODE (expr) == TARGET_EXPR)
4569 {
4570 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4571 It's OK for this to happen if it was part of a subtree that
4572 isn't immediately expanded, such as operand 2 of another
4573 TARGET_EXPR. */
4574 if (!TREE_OPERAND (expr, 1))
4575 {
4576 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4577 TREE_OPERAND (expr, 3) = NULL_TREE;
4578 }
4579 }
4580
4581 /* Keep iterating. */
4582 return NULL_TREE;
4583}
4584
4585
 4586/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
 4587 Using the map pointed to by ST (which is really a pointer_map_t),
4588 remaps all local declarations to appropriate replacements in gimple
4589 statements. */
4590
4591static tree
4592replace_locals_stmt (gimple_stmt_iterator *gsip,
4593 bool *handled_ops_p ATTRIBUTE_UNUSED,
4594 struct walk_stmt_info *wi)
4595{
4596 copy_body_data *id = (copy_body_data *) wi->info;
4597 gimple stmt = gsi_stmt (*gsip);
4598
4599 if (gimple_code (stmt) == GIMPLE_BIND)
4600 {
4601 tree block = gimple_bind_block (stmt);
4602
4603 if (block)
4604 {
4605 remap_block (&block, id);
4606 gimple_bind_set_block (stmt, block);
4607 }
4608
4609 /* This will remap a lot of the same decls again, but this should be
4610 harmless. */
4611 if (gimple_bind_vars (stmt))
526d73ab 4612 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
726a989a
RB
4613 }
4614
4615 /* Keep iterating. */
4616 return NULL_TREE;
4617}
4618
4619
4620/* Copies everything in SEQ and replaces variables and labels local to
4621 current_function_decl. */
4622
4623gimple_seq
4624copy_gimple_seq_and_replace_locals (gimple_seq seq)
4625{
4626 copy_body_data id;
4627 struct walk_stmt_info wi;
4628 struct pointer_set_t *visited;
4629 gimple_seq copy;
4630
4631 /* There's nothing to do for NULL_TREE. */
4632 if (seq == NULL)
4633 return seq;
4634
4635 /* Set up ID. */
4636 memset (&id, 0, sizeof (id));
4637 id.src_fn = current_function_decl;
4638 id.dst_fn = current_function_decl;
4639 id.decl_map = pointer_map_create ();
b5b8b0ac 4640 id.debug_map = NULL;
726a989a
RB
4641
4642 id.copy_decl = copy_decl_no_change;
4643 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4644 id.transform_new_cfg = false;
4645 id.transform_return_to_modify = false;
4646 id.transform_lang_insert_block = NULL;
4647
4648 /* Walk the tree once to find local labels. */
4649 memset (&wi, 0, sizeof (wi));
4650 visited = pointer_set_create ();
4651 wi.info = &id;
4652 wi.pset = visited;
4653 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4654 pointer_set_destroy (visited);
4655
4656 copy = gimple_seq_copy (seq);
4657
4658 /* Walk the copy, remapping decls. */
4659 memset (&wi, 0, sizeof (wi));
4660 wi.info = &id;
4661 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4662
4663 /* Clean up. */
4664 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
4665 if (id.debug_map)
4666 pointer_map_destroy (id.debug_map);
726a989a
RB
4667
4668 return copy;
4669}
4670
4671
6de9cd9a 4672/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
aa4a53af 4673
6de9cd9a
DN
4674static tree
4675debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4676{
4677 if (*tp == data)
4678 return (tree) data;
4679 else
4680 return NULL;
4681}
4682
24e47c76 4683DEBUG_FUNCTION bool
6de9cd9a
DN
4684debug_find_tree (tree top, tree search)
4685{
4686 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4687}
4688
e21aff8a 4689
6de9cd9a
DN
4690/* Declare the variables created by the inliner. Add all the variables in
 4691 VARS to BLOCK. */
4692
4693static void
e21aff8a 4694declare_inline_vars (tree block, tree vars)
6de9cd9a 4695{
84936f6f 4696 tree t;
910ad8de 4697 for (t = vars; t; t = DECL_CHAIN (t))
9659ce8b
JH
4698 {
4699 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4700 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
c021f10b 4701 add_local_decl (cfun, t);
9659ce8b 4702 }
6de9cd9a 4703
e21aff8a
SB
4704 if (block)
4705 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4706}
4707
19734dd8 4708/* Finish up COPY, a copy of DECL. The DECL originally was in ID->src_fn,
1b369fae
RH
 4709 but now it will be in ID->dst_fn. The helpers below use this when
 4710 translating a PARM_DECL into a VAR_DECL. */
19734dd8 4711
1b369fae
RH
4712static tree
4713copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
19734dd8 4714{
19734dd8
RL
4715 /* Don't generate debug information for the copy if we wouldn't have
 4716 generated it for the original DECL either. */
4717 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4718 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4719
4720 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
b8698a0f 4721 declaration inspired this copy. */
19734dd8
RL
4722 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4723
4724 /* The new variable/label has no RTL, yet. */
68a976f2
RL
4725 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4726 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
2eb79bbb 4727 SET_DECL_RTL (copy, 0);
b8698a0f 4728
19734dd8
RL
4729 /* These args would always appear unused, if not for this. */
4730 TREE_USED (copy) = 1;
4731
4732 /* Set the context for the new declaration. */
4733 if (!DECL_CONTEXT (decl))
4734 /* Globals stay global. */
4735 ;
1b369fae 4736 else if (DECL_CONTEXT (decl) != id->src_fn)
19734dd8
RL
4737 /* Things that weren't in the scope of the function we're inlining
4738 from aren't in the scope we're inlining to, either. */
4739 ;
4740 else if (TREE_STATIC (decl))
4741 /* Function-scoped static variables should stay in the original
4742 function. */
4743 ;
4744 else
4745 /* Ordinary automatic local variables are now in the scope of the
4746 new function. */
1b369fae 4747 DECL_CONTEXT (copy) = id->dst_fn;
19734dd8 4748
bb7e6d55
AO
4749 if (TREE_CODE (decl) == VAR_DECL
4750 /* C++ clones functions during parsing, before
4751 referenced_vars. */
4752 && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
4753 && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
4754 DECL_UID (decl)))
4755 add_referenced_var (copy);
4756
19734dd8
RL
4757 return copy;
4758}
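
/* For example, when inlining a callee into a caller: an automatic local
   of the callee is remapped to a copy whose DECL_CONTEXT becomes the
   caller, while a function-scoped "static" keeps its original context,
   so every inlined copy of the callee still refers to the one original
   variable.  */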

/* Copy the PARM_DECL or RESULT_DECL DECL into ID->dst_fn as an ordinary
   VAR_DECL, preserving the flags that matter to later analyses.  */

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Copy DECL unchanged into ID->dst_fn, clearing only the state that must
   not be shared with the original declaration.  */

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Copy DECL, demoting PARM_DECLs and RESULT_DECLs to VAR_DECLs.  */

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument list ORIG_PARM, omitting the
   arguments whose index is set in ARGS_TO_SKIP.  Skipped arguments are
   remapped to fresh VAR_DECLs, which are chained onto VARS.  */

static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        if (TREE_CODE (new_tree) != PARM_DECL)
          new_tree = id->copy_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &DECL_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used as a
           temporary variable later in the function, the uses will be
           replaced by a local variable.  */
        tree var = copy_decl_to_var (arg, id);
        add_referenced_var (var);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}
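
/* For example (illustrative): versioning "int f (int a, int b)" with
   bit 0 set in ARGS_TO_SKIP yields a clone taking only B; A is mapped
   to a fresh local VAR_DECL so that any remaining uses of it in the
   copied body still refer to a valid declaration.  */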

/* Return a copy of the function's static chain.  */

static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}
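
/* Illustrative example (not part of this file): a user can veto cloning
   of a specific function in source code with the attribute tested above:

     int f (int) __attribute__ ((noclone));

   for which tree_versionable_function_p returns false.  */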

/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
        {
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
            if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
              {
                struct cgraph_edge *e;
                struct cgraph_node *node;

                if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
                  {
                    if (!e->inline_failed)
                      cgraph_remove_node_and_inline_clones (e->callee);
                    else
                      cgraph_remove_edge (e);
                  }
                if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                    && id->dst_node->clones)
                  for (node = id->dst_node->clones; node != id->dst_node;)
                    {
                      if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
                        {
                          if (!e->inline_failed)
                            cgraph_remove_node_and_inline_clones (e->callee);
                          else
                            cgraph_remove_edge (e);
                        }

                      /* Walk the clone tree in preorder: descend into
                         clones first, then move to siblings, backing up
                         to the parent when a subtree is done.  */
                      if (node->clones)
                        node = node->clones;
                      else if (node->next_sibling_clone)
                        node = node->next_sibling_clone;
                      else
                        {
                          while (node != id->dst_node
                                 && !node->next_sibling_clone)
                            node = node->clone_of;
                          if (node != id->dst_node)
                            node = node->next_sibling_clone;
                        }
                    }
              }
          delete_basic_block (b);
          changed = true;
        }
    }

  return changed;
}

/* Update clone info (in particular the recorded replace maps) after
   duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      /* Walk the clone tree in the same preorder fashion as above.  */
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters
   are removed from the new version.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
                          VEC(ipa_replace_map_p,gc)* tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bitmap blocks_to_copy, basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_get_node (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_get_node (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      VEC(tree, gc) **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  id.statements_to_fold = pointer_set_create ();

  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  if (id.src_node->ipa_transforms_to_apply)
    {
      VEC(ipa_opt_pass,heap) *old_transforms_to_apply
        = id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply
        = VEC_copy (ipa_opt_pass, heap, id.src_node->ipa_transforms_to_apply);
      for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
        VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
                       VEC_index (ipa_opt_pass, old_transforms_to_apply, i));
    }

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl, old_entry_block->count);
  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    = id.src_cfun->gimple_df->ipa_pta;
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
      {
        gimple init;
        replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;
            if (!replace_info->old_tree)
              {
                /* If only a parameter index was recorded, look the
                   PARM_DECL up in the original argument list.  */
                int i = replace_info->parm_num;
                tree parm;
                for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
                  i--;
                replace_info->old_tree = parm;
              }

            STRIP_NOPS (op);

            if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
              op = TREE_OPERAND (op, 0);

            if (TREE_CODE (op) == ADDR_EXPR)
              {
                op = TREE_OPERAND (op, 0);
                while (handled_component_p (op))
                  op = TREE_OPERAND (op, 0);
                if (TREE_CODE (op) == VAR_DECL)
                  add_referenced_var (op);
              }
            gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            init = setup_one_parameter (&id, replace_info->old_tree,
                                        replace_info->new_tree, id.src_fn,
                                        NULL, &vars);
            if (init)
              VEC_safe_push (gimple, heap, init_stmts, init);
          }
      }
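
  /* For example (illustrative): an ipa_replace_map entry that maps the
     first parameter to the constant 0 makes setup_one_parameter queue an
     initialization of the remapped parameter to 0; the statements queued
     in INIT_STMTS are emitted into a fresh block after the entry block
     once the body has been copied below.  */
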
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name
              = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_default_def (DECL_RESULT (new_decl), new_name);
        }
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  while (VEC_length (gimple, init_stmts))
    insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->analyzed)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts);
  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
}
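
/* A sketch of a typical invocation, modeled on the way clone
   materialization drives this function; CLONE is assumed to be a
   cgraph_node whose clone_of field points at the node it was cloned
   from.  Kept under "#if 0" since it is illustration, not compiler
   code.  */
#if 0
static void
materialize_clone_sketch (struct cgraph_node *clone)
{
  tree_function_versioning (clone->clone_of->decl, clone->decl,
                            clone->clone.tree_map,
                            true /* update_clones */,
                            clone->clone.args_to_skip,
                            NULL /* blocks_to_copy */,
                            NULL /* new_entry */);
}
#endif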

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        *pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
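
/* Usage sketch (hypothetical caller): a front end folding a GENERIC
   expression can attempt the substitution and keep the original call
   when inlining is not possible:

     tree folded = maybe_inline_call_in_expr (call);
     if (folded == NULL_TREE)
       folded = call;
*/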

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  /* The duplicate is a distinct type, canonical to itself.  */
  TYPE_CANONICAL (type) = type;

  return type;
}
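
/* Usage sketch (names illustrative): callers needing a type node they
   can mutate without affecting the original might write

     tree dup = build_duplicate_type (TREE_TYPE (field));

   Because TYPE_CANONICAL of the duplicate is the duplicate itself, it
   is no longer considered the same type as the original.  */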