/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */
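
/* An illustrative sketch (editorial example, not from the GCC sources):
   inlining "int inc (int x) { return x + 1; }" at a call "y = inc (3);"
   conceptually produces

       x.1 = 3;             <-- PARM_DECL x remapped to a VAR_DECL
       retval.2 = x.1 + 1;  <-- the RETURN_EXPR became a MODIFY_EXPR
       y = retval.2;        <-- caller reads the returned-value variable

   where x.1 and retval.2 are hypothetical names for the remapped
   parameter and the dedicated returned-value variable.  */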

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this map is used for more
   than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = pointer_map_create ();

  *pointer_map_insert (id->debug_map, key) = value;
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return name;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by a variable
     during inlining: this saves us from the need to introduce a PHI
     node when the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
          && id->src_cfun->gimple_df->ipa_pta
          && (pi = SSA_NAME_PTR_INFO (name))
          && !pi->pt.anything)
        {
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE if we end up extending the lifetime of an
             SSA name across an abnormal edge, and also increases
             register pressure.

             We simply initialize all uninitialized vars to 0, except
             when we are inlining into the very first BB.  We could avoid
             this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* For fields, do likewise for the offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          get_var_ann (t);
          add_referenced_var (t);
        }
      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

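/* Helper for remap_type below.  TYPE is known to require remapping
   (it is variably modified); build the remapped copy and enter it in
   ID's decl map.  */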
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
        new_tree = build_type_attribute_qual_variant (new_tree,
                                                      TYPE_ATTRIBUTES (type),
                                                      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

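/* Remap TYPE through ID's decl map.  Types that are not variably
   modified map to themselves; variably modified types are copied by
   remap_type_1 above.  */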
tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We can not duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

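/* Remap the chain of declarations DECLS using ID, returning the new
   chain.  Declarations that are kept non-local are recorded in
   *NONLOCALIZED_LIST when one is supplied.  */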
static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && ! DECL_EXTERNAL (old_var)
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;

          /* Also copy value-expressions.  */
          if (TREE_CODE (new_var) == VAR_DECL
              && DECL_HAS_VALUE_EXPR_P (new_var))
            {
              tree tem = DECL_VALUE_EXPR (new_var);
              bool old_regimplify = id->regimplify;
              id->remapping_type_depth++;
              walk_tree (&tem, copy_tree_body_r, id, NULL);
              id->remapping_type_depth--;
              id->regimplify = old_regimplify;
              SET_DECL_VALUE_EXPR (new_var, tem);
            }
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

JH
627/* Copy the whole block tree and root it in id->block. */
628static tree
1b369fae 629remap_blocks (tree block, copy_body_data *id)
acb8f212
JH
630{
631 tree t;
82d6e6fc 632 tree new_tree = block;
acb8f212
JH
633
634 if (!block)
635 return NULL;
636
82d6e6fc
KG
637 remap_block (&new_tree, id);
638 gcc_assert (new_tree != block);
acb8f212 639 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
4a283090
JH
640 prepend_lexical_block (new_tree, remap_blocks (t, id));
641 /* Blocks are in arbitrary order, but make things slightly prettier and do
642 not swap order when producing a copy. */
643 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
82d6e6fc 644 return new_tree;
acb8f212
JH
645}
646
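/* Copy the STATEMENT_LIST at *TP, and all the statements it contains,
   replacing *TP with the copy.  */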
static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

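/* Copy the BIND_EXPR at *TP, remapping its block and its variable
   chain using ID.  */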
6de9cd9a 668static void
1b369fae 669copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
6de9cd9a
DN
670{
671 tree block = BIND_EXPR_BLOCK (*tp);
d4e4baa9
AO
672 /* Copy (and replace) the statement. */
673 copy_tree_r (tp, walk_subtrees, NULL);
6de9cd9a
DN
674 if (block)
675 {
676 remap_block (&block, id);
677 BIND_EXPR_BLOCK (*tp) = block;
678 }
d4e4baa9 679
6de9cd9a 680 if (BIND_EXPR_VARS (*tp))
60a5d78a
JJ
681 /* This will remap a lot of the same decls again, but this should be
682 harmless. */
683 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
d4e4baa9
AO
684}
685

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

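/* A sketch of the *& cleanup handled above (an editorial example):
   if a caller passes "&a" for a pointer parameter "p", substitution
   turns a dereference "*p" in the inlined body into "*&a", which the
   INDIRECT_REF case folds back to plain "a".  */
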
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types or debug stmts.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0
          && !processing_debug_stmt)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
        {
          new_block = id->remapping_type_depth == 0 ? id->block : NULL;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n || id->remapping_type_depth != 0);
              if (n)
                new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  slot = pointer_map_contains (id->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_low_cst (old_t_nr, 0);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (NULL, new_nr);
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      if (gimple_debug_bind_p (stmt))
        {
          copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                          gimple_debug_bind_get_value (stmt),
                                          stmt);
          VEC_safe_push (gimple, heap, id->debug_stmts, copy);
          return copy;
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
         RESX and EH_DISPATCH.  */
      if (id->eh_map)
        switch (gimple_code (copy))
          {
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
              if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
                    r = gimple_call_arg (copy, 1);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 1, r);
                    /* FALLTHRU */

                  case BUILT_IN_EH_POINTER:
                  case BUILT_IN_EH_FILTER:
                    r = gimple_call_arg (copy, 0);
                    r = remap_eh_region_tree_nr (r, id);
                    gimple_call_set_arg (copy, 0, r);
                    break;

                  default:
                    break;
                  }

              /* Reset alias info if we didn't apply measures to
                 keep it valid over inlining by setting DECL_PT_UID.  */
              if (!id->src_cfun->gimple_df
                  || !id->src_cfun->gimple_df->ipa_pta)
                gimple_call_reset_alias_info (copy);
            }
            break;

          case GIMPLE_RESX:
            {
              int r = gimple_resx_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_resx_set_region (copy, r);
            }
            break;

          case GIMPLE_EH_DISPATCH:
            {
              int r = gimple_eh_dispatch_region (copy);
              r = remap_eh_region_nr (r, id);
              gimple_eh_dispatch_set_region (copy, r);
            }
            break;

          default:
            break;
          }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  if (gimple_debug_bind_p (copy))
    return copy;

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  return copy;
}

/* Copy a basic block, scaling the profile accordingly.  Edges will be
   taken care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;
  gcov_type freq;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just minor importance for driving canonicalize_loop_headers.  */
  freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);

  /* We recompute frequencies after inlining, so this is quite safe.  */
  if (freq > BB_FREQ_MAX)
    freq = BB_FREQ_MAX;
  copy_basic_block->frequency = freq;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, false, GSI_NEW_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block was empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, so we
         need to process all of them.  */
      do
        {
          tree fn;

          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              gimple_set_bb (stmt, NULL);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    {
                      int edge_freq = edge->frequency;
                      edge = cgraph_clone_edge (edge, id->dst_node, stmt,
                                                gimple_uid (stmt),
                                                REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
                                                edge->frequency, true);
                      /* We could also just rescale the frequency, but
                         doing so would introduce roundoff errors and make
                         the verifier unhappy.  */
                      edge->frequency
                        = compute_call_stmt_bb_frequency (id->dst_node->decl,
                                                          copy_basic_block);
                      if (dump_file
                          && profile_status_for_function (cfun) != PROFILE_ABSENT
                          && (edge_freq > edge->frequency + 10
                              || edge_freq < edge->frequency - 10))
                        {
                          fprintf (dump_file, "Edge frequency estimated by "
                                   "cgraph %i diverges from inliner's estimate %i\n",
                                   edge_freq,
                                   edge->frequency);
                          fprintf (dump_file,
                                   "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
                                   bb->index,
                                   bb->frequency,
                                   copy_basic_block->frequency);
                        }
                      stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
                    }
                  break;

                case CB_CGE_MOVE_CLONES:
                  cgraph_set_call_stmt_including_clones (id->dst_node,
                                                         orig_stmt, stmt);
                  edge = cgraph_edge (id->dst_node, stmt);
                  break;

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              /* Constant propagation on an argument done during inlining
                 may create a new direct call.  Produce an edge for it.  */
              if ((!edge
                   || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);

                  /* We have a missing edge in the callgraph.  This can happen
                     when previous inlining turned an indirect call into a
JH
1709 direct call by constant propagating arguments or we are
1710 producing dead clone (for further clonning). In all
9b2a5ef7
RH
1711 other cases we hit a bug (incorrect node sharing is the
1712 most common reason for missing edges). */
0e3776db 1713 gcc_assert (dest->needed || !dest->analyzed
bd3cdcc0 1714 || dest->address_taken
0e3776db 1715 || !id->src_node->analyzed);
9b2a5ef7
RH
1716 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1717 cgraph_create_edge_including_clones
47cb0d7d 1718 (id->dst_node, dest, orig_stmt, stmt, bb->count,
b8698a0f 1719 compute_call_stmt_bb_frequency (id->dst_node->decl,
0d63a740 1720 copy_basic_block),
9b2a5ef7
RH
1721 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1722 else
1723 cgraph_create_edge (id->dst_node, dest, stmt,
47cb0d7d
JH
1724 bb->count,
1725 compute_call_stmt_bb_frequency
1726 (id->dst_node->decl, copy_basic_block),
9b2a5ef7
RH
1727 bb->loop_depth)->inline_failed
1728 = CIF_ORIGINALLY_INDIRECT_CALL;
1729 if (dump_file)
1730 {
1731 fprintf (dump_file, "Created new direct edge to %s",
1732 cgraph_node_name (dest));
1733 }
1734 }
9187e02d 1735
f618d33e 1736 flags = gimple_call_flags (stmt);
f618d33e
MJ
1737 if (flags & ECF_MAY_BE_ALLOCA)
1738 cfun->calls_alloca = true;
1739 if (flags & ECF_RETURNS_TWICE)
1740 cfun->calls_setjmp = true;
726a989a 1741 }
e21aff8a 1742
1d65f45c
RH
1743 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1744 id->eh_map, id->eh_lp_nr);
726a989a 1745
b5b8b0ac 1746 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
726a989a
RB
1747 {
1748 ssa_op_iter i;
1749 tree def;
1750
1751 find_new_referenced_vars (gsi_stmt (copy_gsi));
1752 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1753 if (TREE_CODE (def) == SSA_NAME)
1754 SSA_NAME_DEF_STMT (def) = stmt;
1755 }
1756
1757 gsi_next (&copy_gsi);
e21aff8a 1758 }
c2a4718a 1759 while (!gsi_end_p (copy_gsi));
726a989a
RB
1760
1761 copy_gsi = gsi_last_bb (copy_basic_block);
e21aff8a 1762 }
726a989a 1763
e21aff8a
SB
1764 return copy_basic_block;
1765}
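
/* Illustrative sketch, kept out of the build with #if 0: the user-level
   GNU C idiom that the two __builtin_va_arg_pack replacements above
   implement.  The names my_logf and log_to are made up for this example.
   When the always_inline forwarder is inlined, the va_arg_pack call is
   rewritten into the caller's trailing arguments (first branch above)
   and va_arg_pack_len into their count (second branch).  */
#if 0
#include <stdio.h>

extern int log_to (FILE *f, int nargs, const char *fmt, ...);

extern __inline__ __attribute__ ((__always_inline__, __gnu_inline__)) int
my_logf (const char *fmt, ...)
{
  /* Both builtins below only make sense once this body is inlined.  */
  return log_to (stderr, __builtin_va_arg_pack_len (), fmt,
                 __builtin_va_arg_pack ());
}
#endif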

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is, however, an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges,
   and then we might want to change the way we build the CFG pre-inlining
   to include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
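
/* Illustrative sketch (#if 0, not compiler code) of the nonlocal-goto
   situation handled above: the nested function transfers control
   abnormally into its parent, so the edge reaching the label is abnormal
   and any SSA names live across it need the treatment implemented in
   update_ssa_across_abnormal_edges.  All names here are hypothetical.  */
#if 0
static int
parent (int x)
{
  __label__ bail_out;
  int result = 0;

  void helper (int v)
  {
    if (v < 0)
      goto bail_out;    /* non-local goto: abnormal edge to the label */
    result = v;
  }

  helper (x);
  return result;

 bail_out:
  return -1;
}
#endif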

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      if (!is_gimple_debug (copy_stmt))
        {
          update_stmt (copy_stmt);
          if (gimple_in_ssa_p (cfun))
            mark_symbols_for_renaming (copy_stmt);
        }

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
        make_eh_dispatch_edges (copy_stmt);
      else if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
}

/* Copy the PHIs.  All blocks and edges have been copied; some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers link
   the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;

  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;
      edge new_edge;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge const old_edge
                = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;
              tree block = id->block;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge_immediate (new_edge, stmts);
                }
              add_phi_arg (new_phi, new_arg, new_edge,
                           gimple_phi_arg_location_from_edge (phi, old_edge));
            }
        }
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original function.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
  cfun->after_tree_profile = src_cfun->after_tree_profile;

  init_empty_tree_cfg ();

  profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}
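
/* Sketch (#if 0) of the fixed-point profile scaling performed above and
   repeated in copy_cfg_body.  The base 10000 mirrors the usual value of
   REG_BR_PROB_BASE, but is an assumption of this sketch rather than a
   quote of the headers.  */
#if 0
#define SKETCH_PROB_BASE 10000

/* Ratio between the call site count and the callee's entry count.  */
static long long
sketch_count_scale (long long call_count, long long entry_count)
{
  if (entry_count == 0)
    return SKETCH_PROB_BASE;    /* same fallback as initialize_cfun */
  return SKETCH_PROB_BASE * call_count / entry_count;
}

/* Apply the ratio to a block or edge count.  */
static long long
sketch_scale_count (long long count, long long count_scale)
{
  return count * count_scale / SKETCH_PROB_BASE;
}
#endif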

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  gcov_type count_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
                                       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copies.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new_bb;
      new_bb->aux = bb;
    }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale, exit_block_map);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      ((basic_block)bb->aux)->aux = NULL;
      bb->aux = NULL;
    }

  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    BASIC_BLOCK (last)->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}

/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  t = id->block;
  if (gimple_block (stmt))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      if (n)
        t = *n;
    }
  gimple_set_block (stmt, t);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  processing_debug_stmt = 1;

  t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
           && !TREE_STATIC (t)
           && gimple_in_ssa_p (cfun)
           && !pointer_map_contains (id->decl_map, t)
           && !var_ann (t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  gimple_debug_bind_set_var (stmt, t);

  if (gimple_debug_bind_has_value_p (stmt))
    walk_tree (gimple_debug_bind_get_value_ptr (stmt),
               remap_gimple_op_r, &wi, NULL);

  /* Punt if any decl couldn't be remapped.  */
  if (processing_debug_stmt < 0)
    gimple_debug_bind_reset_value (stmt);

  processing_debug_stmt = 0;

  update_stmt (stmt);
  if (gimple_in_ssa_p (cfun))
    mark_symbols_for_renaming (stmt);
}

/* Process deferred debug stmts.  In order to give values better odds
   of being successfully remapped, we delay the processing of debug
   stmts until all other stmts that might require remapping are
   processed.  */

static void
copy_debug_stmts (copy_body_data *id)
{
  size_t i;
  gimple stmt;

  if (!id->debug_stmts)
    return;

  for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
    copy_debug_stmt (stmt, id);

  VEC_free (gimple, heap, id->debug_stmts);
}
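
/* Illustrative sketch (#if 0) of the situation copy_debug_stmt guards
   against.  Compiling a caller of the function below with -g can leave a
   debug bind for x (roughly "# DEBUG x => a + 1" in the dumps) after x
   itself has been optimized away; if 'a' cannot be remapped when the
   body is inlined, the bind's value is reset rather than risking
   different decl uids, and therefore different code, between -g and
   -g0.  */
#if 0
static inline int
callee (int a)
{
  int x = a + 1;        /* x is propagated away; only a debug bind
                           referring to it may survive.  */
  return 2 * x;
}
#endif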

/* Make a copy of the body of SRC_FN so that it can be inserted inline in
   another function.  */

static tree
copy_tree_body (copy_body_data *id)
{
  tree fndecl = id->src_fn;
  tree body = DECL_SAVED_TREE (fndecl);

  walk_tree (&body, copy_tree_body_r, id, NULL);

  return body;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency_scale, entry_block_map,
                        exit_block_map);
  copy_debug_stmts (id);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
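
/* Illustrative sketch (#if 0) of why the test above cannot simply be
   fn == current_function_decl.  When the recursive call below is inlined
   into chain () itself, the argument &local is an ADDR_EXPR of a
   variable that lives in the *callee's* frame, and must not be
   propagated as a constant into the copy.  The function name chain is
   made up for this example.  */
#if 0
static int
chain (int *p, int n)
{
  int local = *p + 1;
  return n == 0 ? local : chain (&local, n - 1);
}
#endif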

/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from BASE_STMT, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
                        basic_block bb, tree var, tree value,
                        gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
        base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      if (!gsi_end_p (gsi))
        gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
        gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
         rhs into a temporary.  gimple_regimplify_operands is not
         prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
          && !is_gimple_reg (gimple_assign_lhs (init_stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
          && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
        {
          tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
                             gimple_expr_type (init_stmt),
                             gimple_assign_rhs1 (init_stmt));
          rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
                                          GSI_NEW_STMT);
          gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
          gimple_assign_set_rhs1 (init_stmt, rhs);
        }
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);
      mark_symbols_for_renaming (init_stmt);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
        {
          tree var, def = gimple_assign_lhs (init_stmt);

          if (TREE_CODE (def) == SSA_NAME)
            var = SSA_NAME_VAR (def);
          else
            var = def;

          insert_init_debug_bind (id, bb, var, def, init_stmt);
        }
    }
}
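
/* Illustrative sketch (#if 0) of the zero-sized case mentioned above: in
   GNU C the empty struct below has size zero, so gimplifying the
   initialization of its copy can produce no statements at all and
   init_stmt may legitimately be NULL.  */
#if 0
struct empty {};        /* sizeof (struct empty) == 0 in GNU C */

static inline int
takes_empty (struct empty e)
{
  struct empty copy = e;        /* no GIMPLE is needed for this store */
  (void) copy;
  return 0;
}
#endif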

/* Initialize parameter P with VALUE.  If needed, produce an init statement
   at the end of BB.  When BB is NULL, we return the init statement to be
   output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      if (fold_convertible_p (TREE_TYPE (p), value))
        rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
      else
        /* ??? For valid (GIMPLE) programs we should not end up here.
           Still if something has gone wrong and we end up with truly
           mismatched types here, fall back to using a VIEW_CONVERT_EXPR
           to not leak invalid GIMPLE to the following passes.  */
        rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* We're actually using the newly-created var.  */
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          insert_debug_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, value, NULL);
        }
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs
     requiring a constant argument to be visible in the inlined function
     body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
              && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of the argument is never used, don't bother
     initializing it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var);
          return insert_init_debug_bind (id, bb, var, rhs, NULL);
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = gimple_build_assign (def, rhs);
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
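
/* Illustrative sketch (#if 0) of why setup_one_parameter maps read-only
   parameters directly to constants even at -O0 (see the comment above):
   with x replaced by the literal 3 in the inlined body,
   __builtin_constant_p can fold to 1.  The function names are made up
   for this example.  */
#if 0
static inline int
known_at_compile_time (const int x)
{
  return __builtin_constant_p (x);
}

int
probe (void)
{
  return known_at_compile_time (3);     /* 1 after inlining */
}
#endif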

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
                               tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
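
/* Illustrative sketch (#if 0) of the transformation driven from above,
   with made-up temporary names: each PARM_DECL of the callee becomes a
   fresh caller-local variable initialized from the matching call
   argument, and the static chain, when present, is set up the same
   way.  */
#if 0
static inline int
add (int a, int b)
{
  return a + b;
}

int
caller (int x)
{
  /* Conceptually, after initialize_inlined_parameters:
       int a.0 = x;
       int b.1 = 3;
     followed by the remapped body computing a.0 + b.1.  */
  return add (x, 3);
}
#endif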

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        {
          tree return_slot_addr = build_fold_addr_expr (return_slot);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

          /* We are going to construct *&return_slot and we can't do that
             for variables believed to be not addressable.

             FIXME: This check possibly can match, because values returned
             via return slot optimization are not believed to have address
             taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          if (gimple_in_ssa_p (cfun))
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset;
              enum machine_mode mode;
              int unsignedp;
              int volatilep;
              tree base;
              base = get_inner_reference (return_slot, &bitsize, &bitpos,
                                          &offset,
                                          &mode, &unsignedp, &volatilep,
                                          false);
              if (TREE_CODE (base) == INDIRECT_REF)
                base = TREE_OPERAND (base, 0);
              if (TREE_CODE (base) == SSA_NAME)
                base = SSA_NAME_VAR (base);
              mark_sym_for_renaming (base);
            }
          var = return_slot_addr;
        }
      else
        {
          var = return_slot;
          gcc_assert (TREE_CODE (var) != SSA_NAME);
          TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
        }
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->local_decls
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->local_decls);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}
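
/* Illustrative sketch (#if 0) of the return-slot case handled above.
   Because struct big is returned in memory, the caller can pass the
   address of d as the return slot; the inlined body then stores through
   that address directly and no separate return variable is created.
   All names are made up for this example.  */
#if 0
struct big { int a[64]; };

static struct big
make (int v)
{
  struct big b;
  int i;
  for (i = 0; i < 64; i++)
    b.a[i] = v;
  return b;
}

int
use (void)
{
  struct big d = make (7);      /* d itself can serve as the return slot */
  return d.a[0];
}
#endif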

/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree step;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
                  "because it receives a non-local goto");
      goto fail;
    }

  for (step = fun->local_decls; step; step = TREE_CHAIN (step))
    {
      tree decl = TREE_VALUE (step);

      if (TREE_CODE (decl) == VAR_DECL
          && TREE_STATIC (decl)
          && !DECL_EXTERNAL (decl)
          && DECL_INITIAL (decl)
          && walk_tree_without_duplicates (&DECL_INITIAL (decl),
                                           has_label_address_in_static_1,
                                           fndecl))
        {
          reason = G_("function %q+F can never be copied because it saves "
                      "address of local label in a static variable");
          goto fail;
        }
    }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}
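
/* Illustrative sketch (#if 0) of a function copy_forbidden rejects: the
   DECL_INITIAL of the static variable below captures the addresses of
   local labels (a GNU C extension), and those addresses cannot be
   remapped when the body is duplicated.  The name uncopyable is made up
   for this example.  */
#if 0
int
uncopyable (int i)
{
  /* walk_tree_without_duplicates finds the label addresses in this
     DECL_INITIAL via has_label_address_in_static_1.  */
  static void *const steps[] = { &&one, &&two };
  goto *steps[i & 1];
 one:
  return 1;
 two:
  return 2;
}
#endif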

static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-NULL
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                         struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline alloca calls unless the user explicitly forced
         so, as this may change the program's memory overhead drastically
         when the function using alloca is called in a loop.  In GCC
         present in SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  */
      if (gimple_alloca_call_p (stmt)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          *handled_ops_p = true;
          return fn;
        }

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          *handled_ops_p = true;
          return t;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
          /* We cannot inline functions that take a variable number of
             arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined
               into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            *handled_ops_p = true;
            return t;

          default:
            break;
          }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          *handled_ops_p = true;
          return t;
        }
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}
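
/* Illustrative sketch (#if 0) of the first GIMPLE_CALL case above: a
   plain inline function using alloca is refused (with a -Winline warning
   from tree_inlinable_function_p further below), while adding
   always_inline overrides the refusal, as the diagnostic text says.  */
#if 0
#include <alloca.h>
#include <string.h>

inline int              /* add __attribute__((always_inline)) to override */
stack_scratch (unsigned n)
{
  char *buf = (char *) alloca (n + 1);
  memset (buf, 0, n + 1);
  return buf[n];
}
#endif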

/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
  visited_nodes = pointer_set_create ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
        break;
    }

  pointer_set_destroy (visited_nodes);
  return forbidden_p;
}
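
/* Illustrative sketch (#if 0) of the GIMPLE_GOTO case rejected by
   inline_forbidden_p_stmt: the target of the goto below is not a
   LABEL_DECL but a pointer value, i.e. a computed goto.  The names are
   made up for this example.  */
#if 0
int
dispatch (int op, void *resume)
{
  if (resume)
    goto *resume;       /* computed goto: this function cannot be inlined */
  return op;
}
#endif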

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  /* Don't auto-inline anything that might not be bound within
     this unit of translation.  */
  else if (!DECL_DECLARED_INLINE_P (fn)
           && DECL_REPLACEABLE_P (fn))
    inlinable = false;

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.  */
      if (always_inline)
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine-dependent
   word size and take a possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
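
/* Worked example (#if 0) of the arithmetic above, assuming for
   concreteness MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4 (both are
   target-dependent): a 16-byte struct costs (16 + 7) / 8 = 2 units,
   while a 40-byte struct exceeds the 8 * 4 = 32 byte threshold and is
   charged the flat memcpy cost of 4.  */
#if 0
static int
sketch_move_cost (long size, long move_max_pieces, long move_ratio)
{
  if (size < 0 || size > move_max_pieces * move_ratio)
    return 4;   /* memcpy call: 3 arguments plus the call itself */
  return (size + move_max_pieces - 1) / move_max_pieces;
}
#endif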

/* Returns the cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
                        tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
        return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}

/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

static int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}
3194
3195
3196/* Estimate number of instructions that will be created by expanding STMT.
3197 WEIGHTS contains weights attributed to various constructs. */
3198
3199int
3200estimate_num_insns (gimple stmt, eni_weights *weights)
3201{
3202 unsigned cost, i;
3203 enum gimple_code code = gimple_code (stmt);
3204 tree lhs;
02f0b13a 3205 tree rhs;
726a989a
RB
3206
3207 switch (code)
3208 {
3209 case GIMPLE_ASSIGN:
3210 /* Try to estimate the cost of assignments. We have two cases to
3211 deal with:
3212 1) Simple assignments to registers;
3213 2) Stores to things that must live in memory. This includes
3214 "normal" stores to scalars, but also assignments of large
3215 structures, or constructors of big arrays;
3216
3217 Let us look at both cases, assuming we have "a = b + C":
3218 <GIMPLE_ASSIGN <var_decl "a">
3219 <plus_expr <var_decl "b"> <constant C>>
3220 If "a" is a GIMPLE register, the assignment to it is free on almost
3221 any target, because "a" usually ends up in a real register. Hence
3222 the only cost of this expression comes from the PLUS_EXPR, and we
3223 can ignore the GIMPLE_ASSIGN.
3224 If "a" is not a GIMPLE register, the assignment to "a" will most
3225 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3226 of moving something into "a", which we compute using the function
3227 estimate_move_cost. */
3228 lhs = gimple_assign_lhs (stmt);
02f0b13a
JH
3229 rhs = gimple_assign_rhs1 (stmt);
3230
726a989a
RB
3231 if (is_gimple_reg (lhs))
3232 cost = 0;
3233 else
3234 cost = estimate_move_cost (TREE_TYPE (lhs));
3235
02f0b13a
JH
3236 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3237 cost += estimate_move_cost (TREE_TYPE (rhs));
3238
3239 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3240 gimple_assign_rhs1 (stmt),
3241 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3242 == GIMPLE_BINARY_RHS
3243 ? gimple_assign_rhs2 (stmt) : NULL);
726a989a
RB
3244 break;
3245
3246 case GIMPLE_COND:
02f0b13a
JH
3247 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3248 gimple_op (stmt, 0),
3249 gimple_op (stmt, 1));
726a989a
RB
3250 break;
3251
3252 case GIMPLE_SWITCH:
3253 /* Take into account cost of the switch + guess 2 conditional jumps for
b8698a0f 3254 each case label.
726a989a
RB
3255
3256 TODO: once the switch expansion logic is sufficiently separated, we can
3257 do a better job of estimating the cost of the switch. */
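/* Worked example (illustrative): for a switch with 16 labels, the
   time-based estimate is floor_log2 (16) * 2 == 8, modeling a balanced
   decision tree of conditional jumps, while the size-based estimate is
   16 * 2 == 32, the guessed worst-case linear expansion. */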
02f0b13a
JH
3258 if (weights->time_based)
3259 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3260 else
3261 cost = gimple_switch_num_labels (stmt) * 2;
6de9cd9a 3262 break;
726a989a
RB
3263
3264 case GIMPLE_CALL:
6de9cd9a 3265 {
726a989a
RB
3266 tree decl = gimple_call_fndecl (stmt);
3267 tree addr = gimple_call_fn (stmt);
8723e2fe
JH
3268 tree funtype = TREE_TYPE (addr);
3269
726a989a
RB
3270 if (POINTER_TYPE_P (funtype))
3271 funtype = TREE_TYPE (funtype);
6de9cd9a 3272
625a2efb 3273 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
726a989a 3274 cost = weights->target_builtin_call_cost;
625a2efb 3275 else
726a989a 3276 cost = weights->call_cost;
b8698a0f 3277
8c96cd51 3278 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
6de9cd9a
DN
3279 switch (DECL_FUNCTION_CODE (decl))
3280 {
d89488ec 3281 /* Builtins that expand to constants. */
6de9cd9a 3282 case BUILT_IN_CONSTANT_P:
6de9cd9a 3283 case BUILT_IN_EXPECT:
d89488ec
JH
3284 case BUILT_IN_OBJECT_SIZE:
3285 case BUILT_IN_UNREACHABLE:
3286 /* Simple register moves or loads from stack. */
3287 case BUILT_IN_RETURN_ADDRESS:
3288 case BUILT_IN_EXTRACT_RETURN_ADDR:
3289 case BUILT_IN_FROB_RETURN_ADDR:
3290 case BUILT_IN_RETURN:
3291 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3292 case BUILT_IN_FRAME_ADDRESS:
3293 case BUILT_IN_VA_END:
3294 case BUILT_IN_STACK_SAVE:
3295 case BUILT_IN_STACK_RESTORE:
be2fd187
RG
3296 /* Exception state returns or moves registers around. */
3297 case BUILT_IN_EH_FILTER:
3298 case BUILT_IN_EH_POINTER:
3299 case BUILT_IN_EH_COPY_VALUES:
3300 return 0;
d89488ec
JH
3301
3302 /* Builtins that are not expensive (that is, they are most probably
3303 expanded inline into reasonably simple code). */
3304 case BUILT_IN_ABS:
3305 case BUILT_IN_ALLOCA:
3306 case BUILT_IN_BSWAP32:
3307 case BUILT_IN_BSWAP64:
3308 case BUILT_IN_CLZ:
3309 case BUILT_IN_CLZIMAX:
3310 case BUILT_IN_CLZL:
3311 case BUILT_IN_CLZLL:
3312 case BUILT_IN_CTZ:
3313 case BUILT_IN_CTZIMAX:
3314 case BUILT_IN_CTZL:
3315 case BUILT_IN_CTZLL:
3316 case BUILT_IN_FFS:
3317 case BUILT_IN_FFSIMAX:
3318 case BUILT_IN_FFSL:
3319 case BUILT_IN_FFSLL:
3320 case BUILT_IN_IMAXABS:
3321 case BUILT_IN_FINITE:
3322 case BUILT_IN_FINITEF:
3323 case BUILT_IN_FINITEL:
3324 case BUILT_IN_FINITED32:
3325 case BUILT_IN_FINITED64:
3326 case BUILT_IN_FINITED128:
3327 case BUILT_IN_FPCLASSIFY:
3328 case BUILT_IN_ISFINITE:
3329 case BUILT_IN_ISINF_SIGN:
3330 case BUILT_IN_ISINF:
3331 case BUILT_IN_ISINFF:
3332 case BUILT_IN_ISINFL:
3333 case BUILT_IN_ISINFD32:
3334 case BUILT_IN_ISINFD64:
3335 case BUILT_IN_ISINFD128:
3336 case BUILT_IN_ISNAN:
3337 case BUILT_IN_ISNANF:
3338 case BUILT_IN_ISNANL:
3339 case BUILT_IN_ISNAND32:
3340 case BUILT_IN_ISNAND64:
3341 case BUILT_IN_ISNAND128:
3342 case BUILT_IN_ISNORMAL:
3343 case BUILT_IN_ISGREATER:
3344 case BUILT_IN_ISGREATEREQUAL:
3345 case BUILT_IN_ISLESS:
3346 case BUILT_IN_ISLESSEQUAL:
3347 case BUILT_IN_ISLESSGREATER:
3348 case BUILT_IN_ISUNORDERED:
3349 case BUILT_IN_VA_ARG_PACK:
3350 case BUILT_IN_VA_ARG_PACK_LEN:
3351 case BUILT_IN_VA_COPY:
3352 case BUILT_IN_TRAP:
3353 case BUILT_IN_SAVEREGS:
3354 case BUILT_IN_POPCOUNTL:
3355 case BUILT_IN_POPCOUNTLL:
3356 case BUILT_IN_POPCOUNTIMAX:
3357 case BUILT_IN_POPCOUNT:
3358 case BUILT_IN_PARITYL:
3359 case BUILT_IN_PARITYLL:
3360 case BUILT_IN_PARITYIMAX:
3361 case BUILT_IN_PARITY:
3362 case BUILT_IN_LABS:
3363 case BUILT_IN_LLABS:
3364 case BUILT_IN_PREFETCH:
3365 cost = weights->target_builtin_call_cost;
3366 break;
be2fd187 3367
6de9cd9a
DN
3368 default:
3369 break;
3370 }
e5c4f28a 3371
8723e2fe
JH
3372 if (decl)
3373 funtype = TREE_TYPE (decl);
3374
02f0b13a
JH
3375 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3376 cost += estimate_move_cost (TREE_TYPE (funtype));
726a989a
RB
3377 /* Our cost must be kept in sync with
3378 cgraph_estimate_size_after_inlining, which uses the function
3379 declaration to figure out the arguments. */
8723e2fe
JH
3380 if (decl && DECL_ARGUMENTS (decl))
3381 {
3382 tree arg;
3383 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
078c3644
JH
3384 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3385 cost += estimate_move_cost (TREE_TYPE (arg));
8723e2fe
JH
3386 }
3387 else if (funtype && prototype_p (funtype))
3388 {
3389 tree t;
078c3644
JH
3390 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3391 t = TREE_CHAIN (t))
3392 if (!VOID_TYPE_P (TREE_VALUE (t)))
3393 cost += estimate_move_cost (TREE_VALUE (t));
8723e2fe
JH
3394 }
3395 else
c7f599d0 3396 {
726a989a
RB
3397 for (i = 0; i < gimple_call_num_args (stmt); i++)
3398 {
3399 tree arg = gimple_call_arg (stmt, i);
078c3644
JH
3400 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3401 cost += estimate_move_cost (TREE_TYPE (arg));
726a989a 3402 }
c7f599d0 3403 }
e5c4f28a 3404
6de9cd9a
DN
3405 break;
3406 }
88f4034b 3407
726a989a
RB
3408 case GIMPLE_GOTO:
3409 case GIMPLE_LABEL:
3410 case GIMPLE_NOP:
3411 case GIMPLE_PHI:
3412 case GIMPLE_RETURN:
726a989a 3413 case GIMPLE_PREDICT:
b5b8b0ac 3414 case GIMPLE_DEBUG:
726a989a
RB
3415 return 0;
3416
3417 case GIMPLE_ASM:
2bd1d2c8 3418 return asm_str_count (gimple_asm_string (stmt));
726a989a 3419
1d65f45c
RH
3420 case GIMPLE_RESX:
3421 /* This is either going to be an external function call with one
3422 argument, or two register copy statements plus a goto. */
3423 return 2;
3424
3425 case GIMPLE_EH_DISPATCH:
3426 /* ??? This is going to turn into a switch statement. Ideally
3427 we'd have a look at the eh region and estimate the number of
3428 edges involved. */
3429 return 10;
3430
726a989a
RB
3431 case GIMPLE_BIND:
3432 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3433
3434 case GIMPLE_EH_FILTER:
3435 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3436
3437 case GIMPLE_CATCH:
3438 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3439
3440 case GIMPLE_TRY:
3441 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3442 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3443
3444 /* OpenMP directives are generally very expensive. */
3445
3446 case GIMPLE_OMP_RETURN:
3447 case GIMPLE_OMP_SECTIONS_SWITCH:
3448 case GIMPLE_OMP_ATOMIC_STORE:
3449 case GIMPLE_OMP_CONTINUE:
3450 /* ...except these, which are cheap. */
3451 return 0;
3452
3453 case GIMPLE_OMP_ATOMIC_LOAD:
3454 return weights->omp_cost;
3455
3456 case GIMPLE_OMP_FOR:
3457 return (weights->omp_cost
3458 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3459 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3460
3461 case GIMPLE_OMP_PARALLEL:
3462 case GIMPLE_OMP_TASK:
3463 case GIMPLE_OMP_CRITICAL:
3464 case GIMPLE_OMP_MASTER:
3465 case GIMPLE_OMP_ORDERED:
3466 case GIMPLE_OMP_SECTION:
3467 case GIMPLE_OMP_SECTIONS:
3468 case GIMPLE_OMP_SINGLE:
3469 return (weights->omp_cost
3470 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 3471
6de9cd9a 3472 default:
1e128c5f 3473 gcc_unreachable ();
6de9cd9a 3474 }
726a989a
RB
3475
3476 return cost;
6de9cd9a
DN
3477}
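/* Usage sketch (illustrative, not part of the original file; STMT is a
   hypothetical statement): a statement is typically weighed twice, once
   for size and once for time, using the global weight sets initialized
   by init_inline_once below:

     int size = estimate_num_insns (stmt, &eni_size_weights);
     int time = estimate_num_insns (stmt, &eni_time_weights);

   For a plain call this yields roughly 1 plus argument-move costs for
   SIZE, and roughly 10 plus the same moves for TIME. */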
3478
726a989a
RB
3479/* Estimate number of instructions that will be created by expanding
3480 function FNDECL. WEIGHTS contains weights attributed to various
3481 constructs. */
aa4a53af 3482
6de9cd9a 3483int
726a989a 3484estimate_num_insns_fn (tree fndecl, eni_weights *weights)
6de9cd9a 3485{
726a989a
RB
3486 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3487 gimple_stmt_iterator bsi;
e21aff8a 3488 basic_block bb;
726a989a 3489 int n = 0;
e21aff8a 3490
726a989a
RB
3491 gcc_assert (my_function && my_function->cfg);
3492 FOR_EACH_BB_FN (bb, my_function)
e21aff8a 3493 {
726a989a
RB
3494 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3495 n += estimate_num_insns (gsi_stmt (bsi), weights);
e21aff8a 3496 }
e21aff8a 3497
726a989a 3498 return n;
7f9bc51b
ZD
3499}
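/* Usage sketch (illustrative; FNDECL is a hypothetical function decl):
   summing the estimate over a whole function body, e.g. when sizing an
   inline candidate:

     int fn_size = estimate_num_insns_fn (fndecl, &eni_size_weights);

   FNDECL must already have a CFG, per the assertion above. */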
3500
726a989a 3501
7f9bc51b
ZD
3502/* Initializes weights used by estimate_num_insns. */
3503
3504void
3505init_inline_once (void)
3506{
7f9bc51b 3507 eni_size_weights.call_cost = 1;
625a2efb 3508 eni_size_weights.target_builtin_call_cost = 1;
7f9bc51b 3509 eni_size_weights.div_mod_cost = 1;
7f9bc51b 3510 eni_size_weights.omp_cost = 40;
02f0b13a 3511 eni_size_weights.time_based = false;
7f9bc51b
ZD
3512
3513 /* Estimating time for call is difficult, since we have no idea what the
3514 called function does. In the current uses of eni_time_weights,
3515 underestimating the cost does less harm than overestimating it, so
ea2c620c 3516 we choose a rather small value here. */
7f9bc51b 3517 eni_time_weights.call_cost = 10;
625a2efb 3518 eni_time_weights.target_builtin_call_cost = 10;
7f9bc51b 3519 eni_time_weights.div_mod_cost = 10;
7f9bc51b 3520 eni_time_weights.omp_cost = 40;
02f0b13a 3521 eni_time_weights.time_based = true;
6de9cd9a
DN
3522}
3523
726a989a
RB
3524/* Estimate the number of instructions in a gimple_seq. */
3525
3526int
3527count_insns_seq (gimple_seq seq, eni_weights *weights)
3528{
3529 gimple_stmt_iterator gsi;
3530 int n = 0;
3531 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3532 n += estimate_num_insns (gsi_stmt (gsi), weights);
3533
3534 return n;
3535}
3536
3537
e21aff8a 3538/* Install new lexical TREE_BLOCK underneath 'current_block'. */
726a989a 3539
e21aff8a 3540static void
4a283090 3541prepend_lexical_block (tree current_block, tree new_block)
e21aff8a 3542{
4a283090
JH
3543 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3544 BLOCK_SUBBLOCKS (current_block) = new_block;
e21aff8a 3545 BLOCK_SUPERCONTEXT (new_block) = current_block;
e21aff8a
SB
3546}
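/* For illustration: if CURRENT_BLOCK already has subblocks B1 -> B2,
   then prepend_lexical_block (CURRENT_BLOCK, NEW) leaves the subblock
   chain as NEW -> B1 -> B2, with
   BLOCK_SUPERCONTEXT (NEW) == CURRENT_BLOCK. */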
3547
3e293154
MJ
3548/* Fetch the callee declaration from the call graph edge going from NODE and
3549 associated with call statement STMT. Return NULL_TREE if not found. */
3550static tree
726a989a 3551get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3e293154
MJ
3552{
3553 struct cgraph_edge *cs;
3554
3555 cs = cgraph_edge (node, stmt);
e33c6cd6 3556 if (cs && !cs->indirect_unknown_callee)
3e293154
MJ
3557 return cs->callee->decl;
3558
3559 return NULL_TREE;
3560}
3561
726a989a 3562/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 3563
e21aff8a 3564static bool
726a989a 3565expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
d4e4baa9 3566{
0f900dfa 3567 tree use_retvar;
d436bff8 3568 tree fn;
b5b8b0ac 3569 struct pointer_map_t *st, *dst;
110cfe1c 3570 tree return_slot;
7740f00d 3571 tree modify_dest;
6de9cd9a 3572 location_t saved_location;
e21aff8a 3573 struct cgraph_edge *cg_edge;
61a05df1 3574 cgraph_inline_failed_t reason;
e21aff8a
SB
3575 basic_block return_block;
3576 edge e;
726a989a 3577 gimple_stmt_iterator gsi, stmt_gsi;
e21aff8a 3578 bool successfully_inlined = FALSE;
4f6c2131 3579 bool purge_dead_abnormal_edges;
e21aff8a
SB
3580 tree t_step;
3581 tree var;
d4e4baa9 3582
6de9cd9a
DN
3583 /* Set input_location here so we get the right instantiation context
3584 if we call instantiate_decl from inlinable_function_p. */
3585 saved_location = input_location;
726a989a
RB
3586 if (gimple_has_location (stmt))
3587 input_location = gimple_location (stmt);
6de9cd9a 3588
d4e4baa9 3589 /* From here on, we're only interested in CALL_EXPRs. */
726a989a 3590 if (gimple_code (stmt) != GIMPLE_CALL)
6de9cd9a 3591 goto egress;
d4e4baa9
AO
3592
3593 /* First, see if we can figure out what function is being called.
3594 If we cannot, then there is no hope of inlining the function. */
726a989a 3595 fn = gimple_call_fndecl (stmt);
d4e4baa9 3596 if (!fn)
3e293154
MJ
3597 {
3598 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3599 if (!fn)
3600 goto egress;
3601 }
d4e4baa9 3602
b58b1157 3603 /* Turn forward declarations into real ones. */
d4d1ebc1 3604 fn = cgraph_node (fn)->decl;
b58b1157 3605
726a989a 3606 /* If FN is a declaration of a function in a nested scope that was
a1a0fd4e
AO
3607 globally declared inline, we don't set its DECL_INITIAL.
3608 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3609 C++ front-end uses it for cdtors to refer to their internal
3610 declarations, that are not real functions. Fortunately those
3611 don't have trees to be saved, so we can tell by checking their
726a989a
RB
3612 gimple_body. */
3613 if (!DECL_INITIAL (fn)
a1a0fd4e 3614 && DECL_ABSTRACT_ORIGIN (fn)
39ecc018 3615 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
a1a0fd4e
AO
3616 fn = DECL_ABSTRACT_ORIGIN (fn);
3617
18c6ada9
JH
3618 /* Objective C and fortran still calls tree_rest_of_compilation directly.
3619 Kill this check once this is fixed. */
1b369fae 3620 if (!id->dst_node->analyzed)
6de9cd9a 3621 goto egress;
18c6ada9 3622
1b369fae 3623 cg_edge = cgraph_edge (id->dst_node, stmt);
18c6ada9 3624
f9417da1
RG
3625 /* Don't inline functions with different EH personalities. */
3626 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3627 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3628 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3629 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3630 goto egress;
3631
d4e4baa9
AO
3632 /* Don't try to inline functions that are not well-suited to
3633 inlining. */
e21aff8a 3634 if (!cgraph_inline_p (cg_edge, &reason))
a833faa5 3635 {
3e293154
MJ
3636 /* If this call was originally indirect, we do not want to emit any
3637 inlining related warnings or sorry messages because there are no
3638 guarantees regarding those. */
e33c6cd6 3639 if (cg_edge->indirect_inlining_edge)
3e293154
MJ
3640 goto egress;
3641
7fac66d4
JH
3642 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3643 /* Avoid warnings during early inline pass. */
7e8b322a 3644 && cgraph_global_info_ready)
2d327012 3645 {
61a05df1
JH
3646 sorry ("inlining failed in call to %q+F: %s", fn,
3647 cgraph_inline_failed_string (reason));
2d327012
JH
3648 sorry ("called from here");
3649 }
3650 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3651 && !DECL_IN_SYSTEM_HEADER (fn)
61a05df1 3652 && reason != CIF_UNSPECIFIED
d63db217
JH
3653 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3654 /* Avoid warnings during early inline pass. */
7e8b322a 3655 && cgraph_global_info_ready)
a833faa5 3656 {
dee15844 3657 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
61a05df1 3658 fn, cgraph_inline_failed_string (reason));
3176a0c2 3659 warning (OPT_Winline, "called from here");
a833faa5 3660 }
6de9cd9a 3661 goto egress;
a833faa5 3662 }
ea99e0be 3663 fn = cg_edge->callee->decl;
d4e4baa9 3664
18c6ada9 3665#ifdef ENABLE_CHECKING
1b369fae 3666 if (cg_edge->callee->decl != id->dst_node->decl)
e21aff8a 3667 verify_cgraph_node (cg_edge->callee);
18c6ada9
JH
3668#endif
3669
e21aff8a 3670 /* We will be inlining this callee. */
1d65f45c 3671 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
e21aff8a 3672
f9417da1
RG
3673 /* Update the caller's EH personality. */
3674 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3675 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3676 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3677
726a989a 3678 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
3679 e = split_block (bb, stmt);
3680 bb = e->src;
3681 return_block = e->dest;
3682 remove_edge (e);
3683
4f6c2131
EB
3684 /* split_block splits after the statement; work around this by
3685 moving the call into the second block manually. Not pretty,
3686 but seems easier than doing the CFG manipulation by hand
726a989a
RB
3687 when the GIMPLE_CALL is in the last statement of BB. */
3688 stmt_gsi = gsi_last_bb (bb);
3689 gsi_remove (&stmt_gsi, false);
4f6c2131 3690
726a989a 3691 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4f6c2131
EB
3692 been the source of abnormal edges. In this case, schedule
3693 the removal of dead abnormal edges. */
726a989a
RB
3694 gsi = gsi_start_bb (return_block);
3695 if (gsi_end_p (gsi))
e21aff8a 3696 {
726a989a 3697 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 3698 purge_dead_abnormal_edges = true;
e21aff8a 3699 }
4f6c2131
EB
3700 else
3701 {
726a989a 3702 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
3703 purge_dead_abnormal_edges = false;
3704 }
3705
726a989a 3706 stmt_gsi = gsi_start_bb (return_block);
742a37d5 3707
d436bff8
AH
3708 /* Build a block containing code to initialize the arguments, the
3709 actual inline expansion of the body, and a label for the return
3710 statements within the function to jump to. The type of the
3711 statement expression is the return type of the function call. */
e21aff8a
SB
3712 id->block = make_node (BLOCK);
3713 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3e2844cb 3714 BLOCK_SOURCE_LOCATION (id->block) = input_location;
4a283090 3715 prepend_lexical_block (gimple_block (stmt), id->block);
e21aff8a 3716
d4e4baa9
AO
3717 /* Local declarations will be replaced by their equivalents in this
3718 map. */
3719 st = id->decl_map;
6be42dd4 3720 id->decl_map = pointer_map_create ();
b5b8b0ac
AO
3721 dst = id->debug_map;
3722 id->debug_map = NULL;
d4e4baa9 3723
e21aff8a 3724 /* Record the function we are about to inline. */
1b369fae
RH
3725 id->src_fn = fn;
3726 id->src_node = cg_edge->callee;
110cfe1c 3727 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
726a989a 3728 id->gimple_call = stmt;
1b369fae 3729
3c8da8a5
AO
3730 gcc_assert (!id->src_cfun->after_inlining);
3731
045685a9 3732 id->entry_bb = bb;
7299cb99
JH
3733 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3734 {
3735 gimple_stmt_iterator si = gsi_last_bb (bb);
3736 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3737 NOT_TAKEN),
3738 GSI_NEW_STMT);
3739 }
726a989a 3740 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 3741
ea99e0be 3742 if (DECL_INITIAL (fn))
4a283090 3743 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
acb8f212 3744
d4e4baa9
AO
3745 /* Return statements in the function body will be replaced by jumps
3746 to the RET_LABEL. */
1e128c5f
GB
3747 gcc_assert (DECL_INITIAL (fn));
3748 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 3749
726a989a 3750 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 3751 return_slot = NULL;
726a989a 3752 if (gimple_call_lhs (stmt))
81bafd36 3753 {
726a989a 3754 modify_dest = gimple_call_lhs (stmt);
81bafd36
ILT
3755
3756 /* The function which we are inlining might not return a value,
3757 in which case we should issue a warning that the function
3758 does not return a value. In that case the optimizers will
3759 see that the variable to which the value is assigned was not
3760 initialized. We do not want to issue a warning about that
3761 uninitialized variable. */
3762 if (DECL_P (modify_dest))
3763 TREE_NO_WARNING (modify_dest) = 1;
726a989a
RB
3764
3765 if (gimple_call_return_slot_opt_p (stmt))
fa47911c 3766 {
110cfe1c 3767 return_slot = modify_dest;
fa47911c
JM
3768 modify_dest = NULL;
3769 }
81bafd36 3770 }
7740f00d
RH
3771 else
3772 modify_dest = NULL;
3773
1ea193c2
ILT
3774 /* If we are inlining a call to the C++ operator new, we don't want
3775 to use type based alias analysis on the return value. Otherwise
3776 we may get confused if the compiler sees that the inlined new
3777 function returns a pointer which was just deleted. See bug
3778 33407. */
3779 if (DECL_IS_OPERATOR_NEW (fn))
3780 {
3781 return_slot = NULL;
3782 modify_dest = NULL;
3783 }
3784
d4e4baa9 3785 /* Declare the return variable for the function. */
0f900dfa 3786 use_retvar = declare_return_variable (id, return_slot, modify_dest);
1ea193c2 3787
acb8f212 3788 /* Add local vars in this inlined callee to caller. */
cb91fab0 3789 t_step = id->src_cfun->local_decls;
acb8f212
JH
3790 for (; t_step; t_step = TREE_CHAIN (t_step))
3791 {
3792 var = TREE_VALUE (t_step);
3793 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
eb50f5f4 3794 {
65401a0b 3795 if (var_ann (var) && add_referenced_var (var))
eb50f5f4
JH
3796 cfun->local_decls = tree_cons (NULL_TREE, var,
3797 cfun->local_decls);
3798 }
526d73ab
JH
3799 else if (!can_be_nonlocal (var, id))
3800 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3801 cfun->local_decls);
acb8f212
JH
3802 }
3803
0d63a740
JH
3804 if (dump_file && (dump_flags & TDF_DETAILS))
3805 {
3806 fprintf (dump_file, "Inlining ");
b8698a0f 3807 print_generic_expr (dump_file, id->src_fn, 0);
0d63a740 3808 fprintf (dump_file, " to ");
b8698a0f 3809 print_generic_expr (dump_file, id->dst_fn, 0);
0d63a740
JH
3810 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3811 }
3812
eb50f5f4
JH
3813 /* This is it. Duplicate the callee body. Assume callee is
3814 pre-gimplified. Note that we must not alter the caller
3815 function in any way before this point, as this CALL_EXPR may be
3816 a self-referential call; if we're calling ourselves, we need to
3817 duplicate our body before altering anything. */
0d63a740
JH
3818 copy_body (id, bb->count,
3819 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3820 bb, return_block);
eb50f5f4 3821
d086d311 3822 /* Reset the escaped solution. */
6b8ed145 3823 if (cfun->gimple_df)
d086d311 3824 pt_solution_reset (&cfun->gimple_df->escaped);
6b8ed145 3825
d4e4baa9 3826 /* Clean up. */
b5b8b0ac
AO
3827 if (id->debug_map)
3828 {
3829 pointer_map_destroy (id->debug_map);
3830 id->debug_map = dst;
3831 }
6be42dd4 3832 pointer_map_destroy (id->decl_map);
d4e4baa9
AO
3833 id->decl_map = st;
3834
5006671f
RG
3835 /* Unlink the call's virtual operands before replacing it. */
3836 unlink_stmt_vdef (stmt);
3837
84936f6f 3838 /* If the inlined function returns a result that we care about,
726a989a
RB
3839 substitute the GIMPLE_CALL with an assignment of the return
3840 variable to the LHS of the call. That is, if STMT was
3841 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3842 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 3843 {
726a989a
RB
3844 gimple old_stmt = stmt;
3845 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3846 gsi_replace (&stmt_gsi, stmt, false);
110cfe1c 3847 if (gimple_in_ssa_p (cfun))
5006671f 3848 mark_symbols_for_renaming (stmt);
726a989a 3849 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e21aff8a 3850 }
6de9cd9a 3851 else
110cfe1c 3852 {
726a989a
RB
3853 /* Handle the case of inlining a function with no return
3854 statement, which causes the return value to become undefined. */
3855 if (gimple_call_lhs (stmt)
3856 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 3857 {
726a989a
RB
3858 tree name = gimple_call_lhs (stmt);
3859 tree var = SSA_NAME_VAR (name);
110cfe1c
JH
3860 tree def = gimple_default_def (cfun, var);
3861
110cfe1c
JH
3862 if (def)
3863 {
726a989a
RB
3864 /* If the variable is used undefined, make this name
3865 undefined via a move. */
3866 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3867 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c 3868 }
110cfe1c
JH
3869 else
3870 {
726a989a
RB
3871 /* Otherwise make this variable undefined. */
3872 gsi_remove (&stmt_gsi, true);
110cfe1c 3873 set_default_def (var, name);
726a989a 3874 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
110cfe1c
JH
3875 }
3876 }
3877 else
726a989a 3878 gsi_remove (&stmt_gsi, true);
110cfe1c 3879 }
d4e4baa9 3880
4f6c2131 3881 if (purge_dead_abnormal_edges)
726a989a 3882 gimple_purge_dead_abnormal_call_edges (return_block);
84936f6f 3883
e21aff8a
SB
3884 /* If the value of the new expression is ignored, that's OK. We
3885 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3886 the equivalent inlined version either. */
726a989a
RB
3887 if (is_gimple_assign (stmt))
3888 {
3889 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 3890 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
726a989a
RB
3891 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3892 }
84936f6f 3893
1eb3331e
DB
3894 /* Output the inlining info for this abstract function, since it has been
3895 inlined. If we don't do this now, we can lose the information about the
3896 variables in the function when the blocks get blown away as soon as we
3897 remove the cgraph node. */
e21aff8a 3898 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 3899
e72fcfe8 3900 /* Update callgraph if needed. */
e21aff8a 3901 cgraph_remove_node (cg_edge->callee);
e72fcfe8 3902
e21aff8a 3903 id->block = NULL_TREE;
e21aff8a 3904 successfully_inlined = TRUE;
742a37d5 3905
6de9cd9a
DN
3906 egress:
3907 input_location = saved_location;
e21aff8a 3908 return successfully_inlined;
d4e4baa9 3909}
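/* A sketch of the CFG surgery performed above (illustrative only):

     before:  [ BB: ... | call | rest ]
     split:   [ BB: ... ]        [ RETURN_BLOCK: call | rest ]
     copy:    [ BB: ... | arg setup ] -> <copied callee body> -> [ RETURN_BLOCK ]

   Finally the call statement in RETURN_BLOCK is replaced by
   `lhs = USE_RETVAR' (or simply removed if the result is unused). */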
6de9cd9a 3910
e21aff8a
SB
3911/* Expand call statements reachable from basic block BB.
3912 We can only have CALL_EXPRs as the "toplevel" tree code or nested
726a989a 3913 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We
e21aff8a
SB
3914 unfortunately cannot use that function here because we need a pointer
3915 to the CALL_EXPR, not the tree itself. */
3916
3917static bool
1b369fae 3918gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 3919{
726a989a 3920 gimple_stmt_iterator gsi;
6de9cd9a 3921
726a989a 3922 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 3923 {
726a989a 3924 gimple stmt = gsi_stmt (gsi);
e21aff8a 3925
726a989a
RB
3926 if (is_gimple_call (stmt)
3927 && expand_call_inline (bb, stmt, id))
3928 return true;
6de9cd9a 3929 }
726a989a 3930
e21aff8a 3931 return false;
6de9cd9a
DN
3932}
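/* Note (illustrative): returning after the first successful inline is
   safe because copying the callee body splits BB; the caller walks all
   basic blocks in order, so the newly created blocks that follow BB are
   still visited and their calls are considered for inlining in turn. */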
3933
726a989a 3934
b8a00a4d
JH
3935/* Walk all basic blocks created after FIRST and try to fold every statement
3936 in the STATEMENTS pointer set. */
726a989a 3937
b8a00a4d
JH
3938static void
3939fold_marked_statements (int first, struct pointer_set_t *statements)
3940{
726a989a 3941 for (; first < n_basic_blocks; first++)
b8a00a4d
JH
3942 if (BASIC_BLOCK (first))
3943 {
726a989a
RB
3944 gimple_stmt_iterator gsi;
3945
3946 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3947 !gsi_end_p (gsi);
3948 gsi_next (&gsi))
3949 if (pointer_set_contains (statements, gsi_stmt (gsi)))
9477eb38 3950 {
726a989a 3951 gimple old_stmt = gsi_stmt (gsi);
4b685e14 3952 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
2bafad93 3953
44e10129
MM
3954 if (old_decl && DECL_BUILT_IN (old_decl))
3955 {
3956 /* Folding builtins can create multiple instructions;
3957 we need to look at all of them. */
3958 gimple_stmt_iterator i2 = gsi;
3959 gsi_prev (&i2);
3960 if (fold_stmt (&gsi))
3961 {
3962 gimple new_stmt;
3963 if (gsi_end_p (i2))
3964 i2 = gsi_start_bb (BASIC_BLOCK (first));
3965 else
3966 gsi_next (&i2);
3967 while (1)
3968 {
3969 new_stmt = gsi_stmt (i2);
3970 update_stmt (new_stmt);
3971 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3972 new_stmt);
3973
3974 if (new_stmt == gsi_stmt (gsi))
3975 {
3976 /* It is okay to check only for the very last
3977 of these statements. If it is a throwing
3978 statement nothing will change. If it isn't
3979 this can remove EH edges. If that weren't
3980 correct then because some intermediate stmts
3981 throw, but not the last one. That would mean
3982 we'd have to split the block, which we can't
3983 here and we'd loose anyway. And as builtins
3984 probably never throw, this all
3985 is mood anyway. */
3986 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3987 new_stmt))
3988 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3989 break;
3990 }
3991 gsi_next (&i2);
3992 }
3993 }
3994 }
3995 else if (fold_stmt (&gsi))
9477eb38 3996 {
726a989a
RB
3997 /* Re-read the statement from GSI as fold_stmt() may
3998 have changed it. */
3999 gimple new_stmt = gsi_stmt (gsi);
4000 update_stmt (new_stmt);
4001
4b685e14
JH
4002 if (is_gimple_call (old_stmt)
4003 || is_gimple_call (new_stmt))
44e10129
MM
4004 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4005 new_stmt);
726a989a
RB
4006
4007 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4008 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
9477eb38
JH
4009 }
4010 }
b8a00a4d
JH
4011 }
4012}
4013
1084e689
JH
4014/* Return true if BB has at least one abnormal outgoing edge. */
4015
4016static inline bool
4017has_abnormal_outgoing_edge_p (basic_block bb)
4018{
4019 edge e;
4020 edge_iterator ei;
4021
4022 FOR_EACH_EDGE (e, ei, bb->succs)
4023 if (e->flags & EDGE_ABNORMAL)
4024 return true;
4025
4026 return false;
4027}
4028
d4e4baa9
AO
4029/* Expand calls to inline functions in the body of FN. */
4030
873aa8f5 4031unsigned int
46c5ad27 4032optimize_inline_calls (tree fn)
d4e4baa9 4033{
1b369fae 4034 copy_body_data id;
e21aff8a 4035 basic_block bb;
b8a00a4d 4036 int last = n_basic_blocks;
d406b663
JJ
4037 struct gimplify_ctx gctx;
4038
c5b6f18e
MM
4039 /* There is no point in performing inlining if errors have already
4040 occurred -- and we might crash if we try to inline invalid
4041 code. */
4042 if (errorcount || sorrycount)
873aa8f5 4043 return 0;
c5b6f18e 4044
d4e4baa9
AO
4045 /* Clear out ID. */
4046 memset (&id, 0, sizeof (id));
4047
1b369fae
RH
4048 id.src_node = id.dst_node = cgraph_node (fn);
4049 id.dst_fn = fn;
d4e4baa9 4050 /* Or any functions that aren't finished yet. */
d4e4baa9 4051 if (current_function_decl)
0f900dfa 4052 id.dst_fn = current_function_decl;
1b369fae
RH
4053
4054 id.copy_decl = copy_decl_maybe_to_var;
4055 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4056 id.transform_new_cfg = false;
4057 id.transform_return_to_modify = true;
9ff420f1 4058 id.transform_lang_insert_block = NULL;
b8a00a4d 4059 id.statements_to_fold = pointer_set_create ();
1b369fae 4060
d406b663 4061 push_gimplify_context (&gctx);
d4e4baa9 4062
672987e8
ZD
4063 /* We make no attempts to keep dominance info up-to-date. */
4064 free_dominance_info (CDI_DOMINATORS);
4065 free_dominance_info (CDI_POST_DOMINATORS);
4066
726a989a
RB
4067 /* Register specific gimple functions. */
4068 gimple_register_cfg_hooks ();
4069
e21aff8a
SB
4070 /* Reach the trees by walking over the CFG, and note the
4071 enclosing basic-blocks in the call edges. */
4072 /* We walk the blocks going forward, because inlined function bodies
4073 will split id->current_basic_block, and the new blocks will
4074 follow it; we'll trudge through them, processing their CALL_EXPRs
4075 along the way. */
4076 FOR_EACH_BB (bb)
4077 gimple_expand_calls_inline (bb, &id);
d4e4baa9 4078
e21aff8a 4079 pop_gimplify_context (NULL);
6de9cd9a 4080
18c6ada9
JH
4081#ifdef ENABLE_CHECKING
4082 {
4083 struct cgraph_edge *e;
4084
1b369fae 4085 verify_cgraph_node (id.dst_node);
18c6ada9
JH
4086
4087 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 4088 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 4089 gcc_assert (e->inline_failed);
18c6ada9
JH
4090 }
4091#endif
b8698a0f 4092
a9eafe81
AP
4093 /* Fold the statements before compacting/renumbering the basic blocks. */
4094 fold_marked_statements (last, id.statements_to_fold);
4095 pointer_set_destroy (id.statements_to_fold);
b8698a0f 4096
b5b8b0ac
AO
4097 gcc_assert (!id.debug_stmts);
4098
a9eafe81
AP
4099 /* Renumber the (code) basic_blocks consecutively. */
4100 compact_blocks ();
4101 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4102 number_blocks (fn);
b8a00a4d 4103
873aa8f5 4104 fold_cond_expr_cond ();
078c3644
JH
4105 delete_unreachable_blocks_update_callgraph (&id);
4106#ifdef ENABLE_CHECKING
4107 verify_cgraph_node (id.dst_node);
4108#endif
726a989a 4109
110cfe1c
JH
4110 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4111 not possible yet - the IPA passes might make various functions to not
4112 throw and they don't care to proactively update local EH info. This is
4113 done later in fixup_cfg pass that also execute the verification. */
726a989a
RB
4114 return (TODO_update_ssa
4115 | TODO_cleanup_cfg
45a80bb9
JH
4116 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4117 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
d4e4baa9
AO
4118}
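/* Hypothetical driver sketch (the real caller is the inlining pass;
   FNDECL and the surrounding code are illustrative):

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     unsigned int todo = optimize_inline_calls (fndecl);
     pop_cfun ();

   The returned TODO flags ask the pass manager to update SSA form and
   clean up the CFG once the inlined bodies have been spliced in. */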
4119
d4e4baa9
AO
4120/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4121
4122tree
46c5ad27 4123copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
d4e4baa9
AO
4124{
4125 enum tree_code code = TREE_CODE (*tp);
07beea0d 4126 enum tree_code_class cl = TREE_CODE_CLASS (code);
d4e4baa9
AO
4127
4128 /* We make copies of most nodes. */
07beea0d 4129 if (IS_EXPR_CODE_CLASS (cl)
d4e4baa9
AO
4130 || code == TREE_LIST
4131 || code == TREE_VEC
8843c120
DN
4132 || code == TYPE_DECL
4133 || code == OMP_CLAUSE)
d4e4baa9
AO
4134 {
4135 /* Because the chain gets clobbered when we make a copy, we save it
4136 here. */
82d6e6fc 4137 tree chain = NULL_TREE, new_tree;
07beea0d 4138
726a989a 4139 chain = TREE_CHAIN (*tp);
d4e4baa9
AO
4140
4141 /* Copy the node. */
82d6e6fc 4142 new_tree = copy_node (*tp);
6de9cd9a
DN
4143
4144 /* Propagate mudflap marked-ness. */
4145 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 4146 mf_mark (new_tree);
6de9cd9a 4147
82d6e6fc 4148 *tp = new_tree;
d4e4baa9
AO
4149
4150 /* Now, restore the chain, if appropriate. That will cause
4151 walk_tree to walk into the chain as well. */
50674e96
DN
4152 if (code == PARM_DECL
4153 || code == TREE_LIST
aaf46ef9 4154 || code == OMP_CLAUSE)
d4e4baa9
AO
4155 TREE_CHAIN (*tp) = chain;
4156
4157 /* For now, we don't update BLOCKs when we make copies. So, we
6de9cd9a
DN
4158 have to nullify all BIND_EXPRs. */
4159 if (TREE_CODE (*tp) == BIND_EXPR)
4160 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 4161 }
4038c495
GB
4162 else if (code == CONSTRUCTOR)
4163 {
4164 /* CONSTRUCTOR nodes need special handling because
4165 we need to duplicate the vector of elements. */
82d6e6fc 4166 tree new_tree;
4038c495 4167
82d6e6fc 4168 new_tree = copy_node (*tp);
4038c495
GB
4169
4170 /* Propagate mudflap marked-ness. */
4171 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 4172 mf_mark (new_tree);
9f63daea 4173
82d6e6fc 4174 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4038c495 4175 CONSTRUCTOR_ELTS (*tp));
82d6e6fc 4176 *tp = new_tree;
4038c495 4177 }
6615c446 4178 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 4179 *walk_subtrees = 0;
6615c446 4180 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 4181 *walk_subtrees = 0;
a396f8ae
GK
4182 else if (TREE_CODE_CLASS (code) == tcc_constant)
4183 *walk_subtrees = 0;
1e128c5f
GB
4184 else
4185 gcc_assert (code != STATEMENT_LIST);
d4e4baa9
AO
4186 return NULL_TREE;
4187}
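/* Minimal usage sketch (illustrative; EXPR is a hypothetical tree):
   deep-copying an expression by walking it with copy_tree_r. Each
   visited node is replaced by a fresh copy in place, so start the walk
   from a local pointer:

     tree copy = expr;
     walk_tree (&copy, copy_tree_r, NULL, NULL);

   Afterwards COPY shares no expression nodes with EXPR; types,
   declarations and constants are deliberately left shared. */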
4188
4189/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
aa4a53af 4190 information indicating to what new SAVE_EXPR this one should be mapped,
e21aff8a
SB
4191 use that one. Otherwise, create a new node and enter it in ST. FN is
4192 the function into which the copy will be placed. */
d4e4baa9 4193
892c7e1e 4194static void
82c82743 4195remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
d4e4baa9 4196{
6be42dd4
RG
4197 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4198 tree *n;
5e20bdd7 4199 tree t;
d4e4baa9
AO
4200
4201 /* See if we already encountered this SAVE_EXPR. */
6be42dd4 4202 n = (tree *) pointer_map_contains (st, *tp);
d92b4486 4203
d4e4baa9
AO
4204 /* If we didn't already remap this SAVE_EXPR, do so now. */
4205 if (!n)
4206 {
5e20bdd7 4207 t = copy_node (*tp);
d4e4baa9 4208
d4e4baa9 4209 /* Remember this SAVE_EXPR. */
6be42dd4 4210 *pointer_map_insert (st, *tp) = t;
350ebd54 4211 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
6be42dd4 4212 *pointer_map_insert (st, t) = t;
d4e4baa9
AO
4213 }
4214 else
5e20bdd7
JZ
4215 {
4216 /* We've already walked into this SAVE_EXPR; don't do it again. */
4217 *walk_subtrees = 0;
6be42dd4 4218 t = *n;
5e20bdd7 4219 }
d4e4baa9
AO
4220
4221 /* Replace this SAVE_EXPR with the copy. */
5e20bdd7 4222 *tp = t;
d4e4baa9 4223}
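/* Note (illustrative): remap_save_expr is meant to be called from a
   walk_tree callback such as unsave_r below, with ST being the decl map
   of the enclosing copy_body_data. The double insertion above maps both
   the original and the copy to the copy, so re-walking the result
   leaves it unchanged. */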
d436bff8 4224
aa4a53af
RK
4225/* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4226 copies the declaration and enters it in the decl map in DATA (which is
1b369fae 4227 really a `copy_body_data *'). */
6de9cd9a
DN
4228
4229static tree
4230mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4231 void *data)
4232{
1b369fae 4233 copy_body_data *id = (copy_body_data *) data;
6de9cd9a
DN
4234
4235 /* Don't walk into types. */
350fae66
RK
4236 if (TYPE_P (*tp))
4237 *walk_subtrees = 0;
6de9cd9a 4238
350fae66 4239 else if (TREE_CODE (*tp) == LABEL_EXPR)
6de9cd9a 4240 {
350fae66 4241 tree decl = TREE_OPERAND (*tp, 0);
6de9cd9a 4242
350fae66 4243 /* Copy the decl and remember the copy. */
1b369fae 4244 insert_decl_map (id, decl, id->copy_decl (decl, id));
6de9cd9a
DN
4245 }
4246
4247 return NULL_TREE;
4248}
4249
19114537
EC
4250/* Perform any modifications to EXPR required when it is unsaved. Does
4251 not recurse into EXPR's subtrees. */
4252
4253static void
4254unsave_expr_1 (tree expr)
4255{
4256 switch (TREE_CODE (expr))
4257 {
4258 case TARGET_EXPR:
4259 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4260 It's OK for this to happen if it was part of a subtree that
4261 isn't immediately expanded, such as operand 2 of another
4262 TARGET_EXPR. */
4263 if (TREE_OPERAND (expr, 1))
4264 break;
4265
4266 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4267 TREE_OPERAND (expr, 3) = NULL_TREE;
4268 break;
4269
4270 default:
4271 break;
4272 }
4273}
4274
6de9cd9a
DN
4275/* Called via walk_tree when an expression is unsaved. Using the
4276 pointer map of the `copy_body_data' in DATA (its decl_map), remaps
4277 all local declarations to appropriate replacements. */
d436bff8
AH
4278
4279static tree
6de9cd9a 4280unsave_r (tree *tp, int *walk_subtrees, void *data)
d436bff8 4281{
1b369fae 4282 copy_body_data *id = (copy_body_data *) data;
6be42dd4
RG
4283 struct pointer_map_t *st = id->decl_map;
4284 tree *n;
6de9cd9a
DN
4285
4286 /* Only a local declaration (variable or label). */
4287 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4288 || TREE_CODE (*tp) == LABEL_DECL)
4289 {
4290 /* Lookup the declaration. */
6be42dd4 4291 n = (tree *) pointer_map_contains (st, *tp);
9f63daea 4292
6de9cd9a
DN
4293 /* If it's there, remap it. */
4294 if (n)
6be42dd4 4295 *tp = *n;
6de9cd9a 4296 }
aa4a53af 4297
6de9cd9a 4298 else if (TREE_CODE (*tp) == STATEMENT_LIST)
726a989a 4299 gcc_unreachable ();
6de9cd9a
DN
4300 else if (TREE_CODE (*tp) == BIND_EXPR)
4301 copy_bind_expr (tp, walk_subtrees, id);
a406865a
RG
4302 else if (TREE_CODE (*tp) == SAVE_EXPR
4303 || TREE_CODE (*tp) == TARGET_EXPR)
82c82743 4304 remap_save_expr (tp, st, walk_subtrees);
d436bff8 4305 else
6de9cd9a
DN
4306 {
4307 copy_tree_r (tp, walk_subtrees, NULL);
4308
4309 /* Do whatever unsaving is required. */
4310 unsave_expr_1 (*tp);
4311 }
4312
4313 /* Keep iterating. */
4314 return NULL_TREE;
d436bff8
AH
4315}
4316
19114537
EC
4317/* Copies everything in EXPR and replaces variables, labels
4318 and SAVE_EXPRs local to EXPR. */
6de9cd9a
DN
4319
4320tree
19114537 4321unsave_expr_now (tree expr)
6de9cd9a 4322{
1b369fae 4323 copy_body_data id;
6de9cd9a
DN
4324
4325 /* There's nothing to do for NULL_TREE. */
4326 if (expr == 0)
4327 return expr;
4328
4329 /* Set up ID. */
4330 memset (&id, 0, sizeof (id));
1b369fae
RH
4331 id.src_fn = current_function_decl;
4332 id.dst_fn = current_function_decl;
6be42dd4 4333 id.decl_map = pointer_map_create ();
b5b8b0ac 4334 id.debug_map = NULL;
6de9cd9a 4335
1b369fae
RH
4336 id.copy_decl = copy_decl_no_change;
4337 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4338 id.transform_new_cfg = false;
4339 id.transform_return_to_modify = false;
9ff420f1 4340 id.transform_lang_insert_block = NULL;
1b369fae 4341
6de9cd9a
DN
4342 /* Walk the tree once to find local labels. */
4343 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4344
4345 /* Walk the tree again, copying, remapping, and unsaving. */
4346 walk_tree (&expr, unsave_r, &id, NULL);
4347
4348 /* Clean up. */
6be42dd4 4349 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
4350 if (id.debug_map)
4351 pointer_map_destroy (id.debug_map);
6de9cd9a
DN
4352
4353 return expr;
4354}
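/* Usage sketch (illustrative; EXPR is a hypothetical tree):
   unsave_expr_now is the tree-level analogue of
   copy_gimple_seq_and_replace_locals below; both duplicate a body while
   remapping the local labels, variables and SAVE_EXPRs it contains, so
   the copy can be expanded independently:

     tree dup = unsave_expr_now (expr);
*/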
4355
726a989a
RB
4356/* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4357 label, copies the declaration and enters it in the decl map in DATA (which
4358 is really a 'copy_body_data *'). */
4359
4360static tree
4361mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4362 bool *handled_ops_p ATTRIBUTE_UNUSED,
4363 struct walk_stmt_info *wi)
4364{
4365 copy_body_data *id = (copy_body_data *) wi->info;
4366 gimple stmt = gsi_stmt (*gsip);
4367
4368 if (gimple_code (stmt) == GIMPLE_LABEL)
4369 {
4370 tree decl = gimple_label_label (stmt);
4371
4372 /* Copy the decl and remember the copy. */
4373 insert_decl_map (id, decl, id->copy_decl (decl, id));
4374 }
4375
4376 return NULL_TREE;
4377}
4378
4379
4380/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4381 Using the pointer map pointed to by ST (the copy_body_data's decl_map),
4382 remaps all local declarations to appropriate replacements in gimple
4383 operands. */
4384
4385static tree
4386replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4387{
4388 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4389 copy_body_data *id = (copy_body_data *) wi->info;
4390 struct pointer_map_t *st = id->decl_map;
4391 tree *n;
4392 tree expr = *tp;
4393
4394 /* Only a local declaration (variable or label). */
4395 if ((TREE_CODE (expr) == VAR_DECL
4396 && !TREE_STATIC (expr))
4397 || TREE_CODE (expr) == LABEL_DECL)
4398 {
4399 /* Lookup the declaration. */
4400 n = (tree *) pointer_map_contains (st, expr);
4401
4402 /* If it's there, remap it. */
4403 if (n)
4404 *tp = *n;
4405 *walk_subtrees = 0;
4406 }
4407 else if (TREE_CODE (expr) == STATEMENT_LIST
4408 || TREE_CODE (expr) == BIND_EXPR
4409 || TREE_CODE (expr) == SAVE_EXPR)
4410 gcc_unreachable ();
4411 else if (TREE_CODE (expr) == TARGET_EXPR)
4412 {
4413 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4414 It's OK for this to happen if it was part of a subtree that
4415 isn't immediately expanded, such as operand 2 of another
4416 TARGET_EXPR. */
4417 if (!TREE_OPERAND (expr, 1))
4418 {
4419 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4420 TREE_OPERAND (expr, 3) = NULL_TREE;
4421 }
4422 }
4423
4424 /* Keep iterating. */
4425 return NULL_TREE;
4426}
4427
4428
4429/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4430 Using the pointer map pointed to by ST (the copy_body_data's decl_map),
4431 remaps all local declarations to appropriate replacements in gimple
4432 statements. */
4433
4434static tree
4435replace_locals_stmt (gimple_stmt_iterator *gsip,
4436 bool *handled_ops_p ATTRIBUTE_UNUSED,
4437 struct walk_stmt_info *wi)
4438{
4439 copy_body_data *id = (copy_body_data *) wi->info;
4440 gimple stmt = gsi_stmt (*gsip);
4441
4442 if (gimple_code (stmt) == GIMPLE_BIND)
4443 {
4444 tree block = gimple_bind_block (stmt);
4445
4446 if (block)
4447 {
4448 remap_block (&block, id);
4449 gimple_bind_set_block (stmt, block);
4450 }
4451
4452 /* This will remap a lot of the same decls again, but this should be
4453 harmless. */
4454 if (gimple_bind_vars (stmt))
526d73ab 4455 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
726a989a
RB
4456 }
4457
4458 /* Keep iterating. */
4459 return NULL_TREE;
4460}
4461
4462
4463/* Copies everything in SEQ and replaces variables and labels local to
4464 current_function_decl. */
4465
4466gimple_seq
4467copy_gimple_seq_and_replace_locals (gimple_seq seq)
4468{
4469 copy_body_data id;
4470 struct walk_stmt_info wi;
4471 struct pointer_set_t *visited;
4472 gimple_seq copy;
4473
4474 /* There's nothing to do for an empty sequence. */
4475 if (seq == NULL)
4476 return seq;
4477
4478 /* Set up ID. */
4479 memset (&id, 0, sizeof (id));
4480 id.src_fn = current_function_decl;
4481 id.dst_fn = current_function_decl;
4482 id.decl_map = pointer_map_create ();
b5b8b0ac 4483 id.debug_map = NULL;
726a989a
RB
4484
4485 id.copy_decl = copy_decl_no_change;
4486 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4487 id.transform_new_cfg = false;
4488 id.transform_return_to_modify = false;
4489 id.transform_lang_insert_block = NULL;
4490
4491 /* Walk the tree once to find local labels. */
4492 memset (&wi, 0, sizeof (wi));
4493 visited = pointer_set_create ();
4494 wi.info = &id;
4495 wi.pset = visited;
4496 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4497 pointer_set_destroy (visited);
4498
4499 copy = gimple_seq_copy (seq);
4500
4501 /* Walk the copy, remapping decls. */
4502 memset (&wi, 0, sizeof (wi));
4503 wi.info = &id;
4504 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4505
4506 /* Clean up. */
4507 pointer_map_destroy (id.decl_map);
b5b8b0ac
AO
4508 if (id.debug_map)
4509 pointer_map_destroy (id.debug_map);
726a989a
RB
4510
4511 return copy;
4512}
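/* Usage sketch (illustrative; CLEANUP and OTHER_SEQ are hypothetical):
   duplicating a cleanup sequence so it can be emitted on a second path
   without sharing locals or labels:

     gimple_seq copy = copy_gimple_seq_and_replace_locals (cleanup);
     gimple_seq_add_seq (&other_seq, copy);
*/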
4513
4514
6de9cd9a 4515/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
aa4a53af 4516
6de9cd9a
DN
4517static tree
4518debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4519{
4520 if (*tp == data)
4521 return (tree) data;
4522 else
4523 return NULL;
4524}
4525
6de9cd9a
DN
4526bool
4527debug_find_tree (tree top, tree search)
4528{
4529 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4530}
4531
e21aff8a 4532
6de9cd9a
DN
4533/* Declare the variables created by the inliner. Add all the variables in
4534 VARS to BLOCK. */
4535
4536static void
e21aff8a 4537declare_inline_vars (tree block, tree vars)
6de9cd9a 4538{
84936f6f
RH
4539 tree t;
4540 for (t = vars; t; t = TREE_CHAIN (t))
9659ce8b
JH
4541 {
4542 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4543 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
cb91fab0 4544 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
9659ce8b 4545 }
6de9cd9a 4546
e21aff8a
SB
4547 if (block)
4548 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4549}
4550
19734dd8 4551/* Finish up COPY, a copy of DECL. The DECL originally lived in ID->src_fn,
1b369fae
RH
4552 but now it will live in ID->dst_fn. Sets up the debug information,
4553 context and RTL state of the copied declaration. */
19734dd8 4554
1b369fae
RH
4555static tree
4556copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
19734dd8 4557{
19734dd8
RL
4558 /* Don't generate debug information for the copy if we wouldn't have
4559 generated it for the copy either. */
4560 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4561 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4562
4563 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
b8698a0f 4564 declaration inspired this copy. */
19734dd8
RL
4565 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4566
4567 /* The new variable/label has no RTL, yet. */
68a976f2
RL
4568 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4569 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
19734dd8 4570 SET_DECL_RTL (copy, NULL_RTX);
b8698a0f 4571
19734dd8
RL
4572 /* These args would always appear unused, if not for this. */
4573 TREE_USED (copy) = 1;
4574
4575 /* Set the context for the new declaration. */
4576 if (!DECL_CONTEXT (decl))
4577 /* Globals stay global. */
4578 ;
1b369fae 4579 else if (DECL_CONTEXT (decl) != id->src_fn)
19734dd8
RL
4580 /* Things that weren't in the scope of the function we're inlining
4581 from aren't in the scope we're inlining to, either. */
4582 ;
4583 else if (TREE_STATIC (decl))
4584 /* Function-scoped static variables should stay in the original
4585 function. */
4586 ;
4587 else
4588 /* Ordinary automatic local variables are now in the scope of the
4589 new function. */
1b369fae 4590 DECL_CONTEXT (copy) = id->dst_fn;
19734dd8
RL
4591
4592 return copy;
4593}
4594
1b369fae
RH
4595static tree
4596copy_decl_to_var (tree decl, copy_body_data *id)
4597{
4598 tree copy, type;
4599
4600 gcc_assert (TREE_CODE (decl) == PARM_DECL
4601 || TREE_CODE (decl) == RESULT_DECL);
4602
4603 type = TREE_TYPE (decl);
4604
c2255bc4
AH
4605 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4606 VAR_DECL, DECL_NAME (decl), type);
25a6a873
RG
4607 if (DECL_PT_UID_SET_P (decl))
4608 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
1b369fae
RH
4609 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4610 TREE_READONLY (copy) = TREE_READONLY (decl);
4611 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
0890b981 4612 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
1b369fae
RH
4613
4614 return copy_decl_for_dup_finish (id, decl, copy);
4615}
4616
c08cd4c1
JM
4617/* Like copy_decl_to_var, but create a return slot object instead of a
4618 pointer variable for return by invisible reference. */
4619
4620static tree
4621copy_result_decl_to_var (tree decl, copy_body_data *id)
4622{
4623 tree copy, type;
4624
4625 gcc_assert (TREE_CODE (decl) == PARM_DECL
4626 || TREE_CODE (decl) == RESULT_DECL);
4627
4628 type = TREE_TYPE (decl);
4629 if (DECL_BY_REFERENCE (decl))
4630 type = TREE_TYPE (type);
4631
c2255bc4
AH
4632 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4633 VAR_DECL, DECL_NAME (decl), type);
25a6a873
RG
4634 if (DECL_PT_UID_SET_P (decl))
4635 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
c08cd4c1
JM
4636 TREE_READONLY (copy) = TREE_READONLY (decl);
4637 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4638 if (!DECL_BY_REFERENCE (decl))
4639 {
4640 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
0890b981 4641 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
c08cd4c1
JM
4642 }
4643
4644 return copy_decl_for_dup_finish (id, decl, copy);
4645}
4646
9ff420f1 4647tree
1b369fae
RH
4648copy_decl_no_change (tree decl, copy_body_data *id)
4649{
4650 tree copy;
4651
4652 copy = copy_node (decl);
4653
4654 /* The COPY is not abstract; it will be generated in DST_FN. */
4655 DECL_ABSTRACT (copy) = 0;
4656 lang_hooks.dup_lang_specific_decl (copy);
4657
4658 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4659 been taken; it's for internal bookkeeping in expand_goto_internal. */
4660 if (TREE_CODE (copy) == LABEL_DECL)
4661 {
4662 TREE_ADDRESSABLE (copy) = 0;
4663 LABEL_DECL_UID (copy) = -1;
4664 }
4665
4666 return copy_decl_for_dup_finish (id, decl, copy);
4667}
4668
4669static tree
4670copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4671{
4672 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4673 return copy_decl_to_var (decl, id);
4674 else
4675 return copy_decl_no_change (decl, id);
4676}
4677
19734dd8
RL
4678/* Return a copy of the function's argument tree. */
4679static tree
c6f7cfc1
JH
4680copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4681 bitmap args_to_skip, tree *vars)
19734dd8 4682{
c6f7cfc1
JH
4683 tree arg, *parg;
4684 tree new_parm = NULL;
4685 int i = 0;
19734dd8 4686
c6f7cfc1
JH
4687 parg = &new_parm;
4688
4689 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4690 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4691 {
4692 tree new_tree = remap_decl (arg, id);
4693 lang_hooks.dup_lang_specific_decl (new_tree);
4694 *parg = new_tree;
4695 parg = &TREE_CHAIN (new_tree);
4696 }
eb50f5f4 4697 else if (!pointer_map_contains (id->decl_map, arg))
c6f7cfc1
JH
4698 {
4699 /* Make an equivalent VAR_DECL. If the argument was used
4700 as a temporary variable later in the function, the uses will be
4701 replaced by the local variable. */
4702 tree var = copy_decl_to_var (arg, id);
4703 get_var_ann (var);
4704 add_referenced_var (var);
4705 insert_decl_map (id, arg, var);
4706 /* Declare this new variable. */
4707 TREE_CHAIN (var) = *vars;
4708 *vars = var;
4709 }
4710 return new_parm;
19734dd8
RL
4711}
4712
4713/* Return a copy of the function's static chain. */
4714static tree
1b369fae 4715copy_static_chain (tree static_chain, copy_body_data * id)
19734dd8
RL
4716{
4717 tree *chain_copy, *pvar;
4718
4719 chain_copy = &static_chain;
4720 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4721 {
82d6e6fc
KG
4722 tree new_tree = remap_decl (*pvar, id);
4723 lang_hooks.dup_lang_specific_decl (new_tree);
4724 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4725 *pvar = new_tree;
19734dd8
RL
4726 }
4727 return static_chain;
4728}
4729
4730/* Return true if the function is allowed to be versioned.
4731 This is a guard for the versioning functionality. */
27dbd3ac 4732
19734dd8
RL
4733bool
4734tree_versionable_function_p (tree fndecl)
4735{
86631ea3
MJ
4736 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4737 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
19734dd8
RL
4738}
4739
9187e02d
JH
4740/* Delete all unreachable basic blocks and update callgraph.
4741 Doing so is somewhat nontrivial because we need to update all clones and
4742 remove inline functions that become unreachable. */
9f5e9983 4743
9187e02d
JH
4744static bool
4745delete_unreachable_blocks_update_callgraph (copy_body_data *id)
9f5e9983 4746{
9187e02d
JH
4747 bool changed = false;
4748 basic_block b, next_bb;
4749
4750 find_unreachable_blocks ();
4751
4752 /* Delete all unreachable basic blocks. */
4753
4754 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4755 {
4756 next_bb = b->next_bb;
4757
4758 if (!(b->flags & BB_REACHABLE))
4759 {
4760 gimple_stmt_iterator bsi;
4761
4762 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4763 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4764 {
4765 struct cgraph_edge *e;
4766 struct cgraph_node *node;
4767
4768 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4769 {
4770 if (!e->inline_failed)
4771 cgraph_remove_node_and_inline_clones (e->callee);
4772 else
4773 cgraph_remove_edge (e);
4774 }
4775 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4776 && id->dst_node->clones)
4777 for (node = id->dst_node->clones; node != id->dst_node;)
4778 {
4779 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4780 {
4781 if (!e->inline_failed)
4782 cgraph_remove_node_and_inline_clones (e->callee);
4783 else
4784 cgraph_remove_edge (e);
4785 }
b8698a0f 4786
9187e02d
JH
4787 if (node->clones)
4788 node = node->clones;
4789 else if (node->next_sibling_clone)
4790 node = node->next_sibling_clone;
4791 else
4792 {
4793 while (node != id->dst_node && !node->next_sibling_clone)
4794 node = node->clone_of;
4795 if (node != id->dst_node)
4796 node = node->next_sibling_clone;
4797 }
4798 }
4799 }
4800 delete_basic_block (b);
4801 changed = true;
4802 }
4803 }
4804
4805 if (changed)
4806 tidy_fallthru_edges ();
9187e02d 4807 return changed;
9f5e9983
JJ
4808}
4809
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
                          VEC(ipa_replace_map_p,gc)* tree_map,
                          bool update_clones, bitmap args_to_skip)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree t_step;
  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = pointer_set_create ();

  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  if (id.src_node->ipa_transforms_to_apply)
    {
      VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
      unsigned int i;

      id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
                                                       id.src_node->ipa_transforms_to_apply);
      for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
        VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
                       VEC_index (ipa_opt_pass,
                                  old_transforms_to_apply,
                                  i));
    }

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
    = id.src_cfun->gimple_df->ipa_pta;
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
      {
        gimple init;
        replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;
            if (!replace_info->old_tree)
              {
                int i = replace_info->parm_num;
                tree parm;
                for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
                  i--;
                replace_info->old_tree = parm;
              }

            STRIP_NOPS (op);

            if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
              op = TREE_OPERAND (op, 0);

            if (TREE_CODE (op) == ADDR_EXPR)
              {
                op = TREE_OPERAND (op, 0);
                while (handled_component_p (op))
                  op = TREE_OPERAND (op, 0);
                if (TREE_CODE (op) == VAR_DECL)
                  add_referenced_var (op);
              }
            gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            init = setup_one_parameter (&id, replace_info->old_tree,
                                        replace_info->new_tree, id.src_fn,
                                        NULL,
                                        &vars);
            if (init)
              VEC_safe_push (gimple, heap, init_stmts, init);
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.dst_fn);

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
         t_step; t_step = TREE_CHAIN (t_step))
      {
        tree var = TREE_VALUE (t_step);
        if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
          cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
        else if (!can_be_nonlocal (var, &id))
          cfun->local_decls =
            tree_cons (NULL_TREE, remap_decl (var, &id),
                       cfun->local_decls);
      }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  while (VEC_length (gimple, init_stmts))
    insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->analyzed)
    cgraph_rebuild_references ();
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts);
  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}

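/* Aside (illustration only, not part of tree-inline.c): a caller-side
   sketch of driving the versioning machinery above to pin one parameter
   to a constant.  The helper name is hypothetical, and building NEW_DECL
   via copy_node is a simplification (real callers construct the decl
   through cgraph helpers); the replace-map fields are the ones consumed
   by tree_function_versioning above.  */

static struct cgraph_node *
sketch_make_constant_version (struct cgraph_node *node, int parm_num,
                              tree constant_value)
{
  tree new_decl;
  VEC(ipa_replace_map_p,gc) *tree_map;
  struct ipa_replace_map *map;

  if (!tree_versionable_function_p (node->decl))
    return NULL;

  new_decl = copy_node (node->decl);  /* Simplified, see lead-in.  */
  tree_map = VEC_alloc (ipa_replace_map_p, gc, 1);
  map = GGC_CNEW (struct ipa_replace_map);
  map->old_tree = NULL_TREE;   /* Let parm_num select the PARM_DECL.  */
  map->parm_num = parm_num;
  map->new_tree = constant_value;
  map->replace_p = true;
  VEC_safe_push (ipa_replace_map_p, gc, tree_map, map);

  tree_function_versioning (node->decl, new_decl, tree_map,
                            /*update_clones=*/false, /*args_to_skip=*/NULL);
  return cgraph_node (new_decl);
}
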
/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
        *pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}

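/* Aside (illustration only, not part of tree-inline.c): a front end
   folding a call to a "const" function at GENERIC level might use the
   routine above as follows.  The helper name is hypothetical;
   build_call_expr is the regular GENERIC call builder.  */

static tree
sketch_fold_const_call (tree fndecl, tree arg)
{
  tree call = build_call_expr (fndecl, 1, arg);
  tree body = maybe_inline_call_in_expr (call);

  /* On success we get the RHS of the return-value assignment;
     otherwise keep the original CALL_EXPR.  */
  return body ? body : call;
}
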
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);
  if (id.debug_map)
    pointer_map_destroy (id.debug_map);

  TYPE_CANONICAL (type) = type;

  return type;
}
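
/* Aside (illustration only, not part of tree-inline.c): because the copy
   becomes its own TYPE_CANONICAL, it no longer compares equal to the
   original for type-based analyses.  A hypothetical front-end helper
   wanting a deliberately distinct variant of a struct might do:  */

static tree
sketch_distinct_variant (tree struct_type)
{
  tree dup = build_duplicate_type (struct_type);
  /* DUP has copied FIELD_DECLs and TYPE_CANONICAL (dup) == dup, so it is
     intentionally incompatible with STRUCT_TYPE.  */
  return dup;
}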

/* Return whether it is safe to inline a function; it is not when the
   callee uses incompatible target-specific options or when the call-site
   actual types mismatch the parameter types.  E is the call edge to be
   checked.  */
bool
tree_can_inline_p (struct cgraph_edge *e)
{
#if 0
  /* This causes a regression in SPEC in that it prevents a cold function from
     inlining a hot function.  Perhaps this should only apply to functions
     that the user declares hot/cold/optimize explicitly.  */

  /* Don't inline a function with a higher optimization level than the
     caller, or with different space constraints (hot/cold functions).  */
  tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);

  if (caller_tree != callee_tree)
    {
      struct cl_optimization *caller_opt
        = TREE_OPTIMIZATION ((caller_tree)
                             ? caller_tree
                             : optimization_default_node);

      struct cl_optimization *callee_opt
        = TREE_OPTIMIZATION ((callee_tree)
                             ? callee_tree
                             : optimization_default_node);

      if ((caller_opt->optimize > callee_opt->optimize)
          || (caller_opt->optimize_size != callee_opt->optimize_size))
        return false;
    }
#endif
  tree caller, callee, lhs;

  caller = e->caller->decl;
  callee = e->callee->decl;

  /* We cannot inline a function that uses a different EH personality
     than the caller.  */
  if (DECL_FUNCTION_PERSONALITY (caller)
      && DECL_FUNCTION_PERSONALITY (callee)
      && (DECL_FUNCTION_PERSONALITY (caller)
          != DECL_FUNCTION_PERSONALITY (callee)))
    {
      e->inline_failed = CIF_UNSPECIFIED;
      gimple_call_set_cannot_inline (e->call_stmt, true);
      return false;
    }

  /* Allow the backend to decide if inlining is ok.  */
  if (!targetm.target_option.can_inline_p (caller, callee))
    {
      e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
      gimple_call_set_cannot_inline (e->call_stmt, true);
      e->call_stmt_cannot_inline_p = true;
      return false;
    }

  /* Do not inline calls where we cannot trivially work around mismatches
     in argument or return types.  */
  if (e->call_stmt
      && ((DECL_RESULT (callee)
           && !DECL_BY_REFERENCE (DECL_RESULT (callee))
           && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
           && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                          TREE_TYPE (lhs))
           && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
          || !gimple_check_call_args (e->call_stmt)))
    {
      e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
      gimple_call_set_cannot_inline (e->call_stmt, true);
      e->call_stmt_cannot_inline_p = true;
      return false;
    }

  return true;
}
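
/* Aside (illustration only, not part of tree-inline.c): an inlining
   heuristic would typically screen each outgoing call edge with the
   predicate above before considering it further.  The helper below is
   hypothetical:  */

static int
sketch_count_inlinable_edges (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  int n = 0;

  for (e = node->callees; e; e = e->next_callee)
    /* tree_can_inline_p records the failure reason on the edge and marks
       the call statement when it returns false.  */
    if (tree_can_inline_p (e))
      n++;
  return n;
}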