/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, but the result is a new
   function rather than blocks inserted into an existing function as
   with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */

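/* As a concrete sketch (names invented; this is not code the compiler
   emits verbatim), inlining

     int square (int x) { return x * x; }
     ...
     y = square (3);

   remaps the PARM_DECL `x' to a new local VAR_DECL initialized from
   the argument, and rewrites the RETURN_EXPR into a GIMPLE_MODIFY_STMT
   of a returned-value variable, roughly:

     x.1 = 3;
     retval.2 = x.1 * x.1;
     y = retval.2;

   after which control simply flows to the statement following the
   original call.  */
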
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;

/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */

/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static tree copy_generic_body (copy_body_data *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_no_change (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

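/* For example (hypothetical trees): after

     insert_decl_map (id, old_parm, new_var);

   the map contains both old_parm -> new_var and the identity mapping
   new_var -> new_var, so if a later walk encounters new_var itself it
   is mapped to itself rather than being duplicated a second time.  */
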
/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return *n;

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new = remap_decl (SSA_NAME_VAR (name), id);
  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI
     node when the return value is only partly initialized.  */
  if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new = make_ssa_name (new, NULL);
      insert_decl_map (id, name, new);
      if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
        {
          SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
          if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
            set_default_def (SSA_NAME_VAR (new), new);
        }
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
    }
  else
    insert_decl_map (id, name, new);
  return new;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME)
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree *node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy; build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                         TYPE_MODE (type),
                                         TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                           TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  return remap_type_1 (type, id);
}

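/* To see why only variably modified types need work, consider this
   sketch (hypothetical source):

     void f (int n)
     {
       int a[n];
       ...
     }

   The domain of a's ARRAY_TYPE refers to f's `n', so when f's body is
   duplicated the type must be remapped so that its size expressions
   refer to the copy of `n'; remap_type_1 arranges this by remapping
   TYPE_DOMAIN and walking TYPE_SIZE/TYPE_SIZE_UNIT above.  An ordinary
   type like `int[10]' mentions no function-local decls and is simply
   mapped to itself.  */
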
static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot chain the local static declarations into the new
         declaration list, as we cannot duplicate them or break the
         one-decl rule.  Instead, link them into the caller's
         unexpanded_var_list.  */
      if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
                                                 cfun->unexpanded_var_list);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    lang_hooks.decls.insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new = block;

  if (!block)
    return NULL;

  remap_block (&new, id);
  gcc_assert (new != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new, remap_blocks (t, id));
  return new;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Called from copy_body via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained GIMPLE_MODIFY_STMT.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             GIMPLE_MODIFY_STMT hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *) 1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
          && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
          && (lang_hooks.tree_inlining.auto_var_in_fn_p
              (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about the types they point to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              new = unshare_expr (*n);
              old = *tp;
              *tp = fold_indirect_ref_1 (type, new);
              if (! *tp)
                {
                  if (TREE_CODE (new) == ADDR_EXPR)
                    *tp = TREE_OPERAND (new, 0);
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into
         referenced vars.  */
      if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed
         block.  When inlining we want EXPRs without a block to appear
         in the block of the function call.  */
      if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst
            (NULL_TREE,
             id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple; consider the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);
          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

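/* To illustrate the *& elimination above (names invented): if

     static void set (int *p) { *p = 1; }
     ...
     set (&x);

   is inlined, `p' is bound to `&x' and the body's `*p' would become
   `*&x'; the INDIRECT_REF case folds that back to plain `x', so the
   copied body contains no gratuitous dereference of an address.  */
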
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     have just small importance to drive canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);
  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
         this is signalled by making stmt pointer NULL.  */
      if (stmt)
        {
          tree call, decl;

          gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);

          /* With return slot optimization we can end up with
             non-gimple (foo *)&this->m, fix that here.  */
          if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
              && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
              && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
            gimplify_stmt (&stmt);

          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);

          /* Process the new statement.  gimplify_stmt possibly turned the
             statement into multiple statements; we need to process all
             of them.  */
          while (!bsi_end_p (copy_bsi))
            {
              stmt = bsi_stmt (copy_bsi);
              call = get_call_expr_in (stmt);

              /* Statements produced by inlining can be unfolded, especially
                 when we constant propagated some operands.  We can't fold
                 them right now for two reasons:
                 1) folding requires SSA_NAME_DEF_STMTs to be correct
                 2) we can't change function calls to builtins.
                 So we just mark the statement for later folding.  We mark
                 all new statements, instead of just the statements that
                 changed by some nontrivial substitution, so that even
                 statements made foldable indirectly are updated.  If this
                 turns out to be expensive, copy_body can be told to watch
                 for nontrivial changes.  */
              if (id->statements_to_fold)
                pointer_set_insert (id->statements_to_fold, stmt);
              /* We're duplicating a CALL_EXPR.  Find any corresponding
                 callgraph edges and update or duplicate them.  */
              if (call && (decl = get_callee_fndecl (call)))
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;

                  switch (id->transform_call_graph_edges)
                    {
                    case CB_CGE_DUPLICATE:
                      edge = cgraph_edge (id->src_node, orig_stmt);
                      if (edge)
                        cgraph_clone_edge (edge, id->dst_node, stmt,
                                           REG_BR_PROB_BASE, 1, edge->frequency, true);
                      break;

                    case CB_CGE_MOVE_CLONES:
                      for (node = id->dst_node->next_clone;
                           node;
                           node = node->next_clone)
                        {
                          edge = cgraph_edge (node, orig_stmt);
                          gcc_assert (edge);
                          cgraph_set_call_stmt (edge, stmt);
                        }
                      /* FALLTHRU */

                    case CB_CGE_MOVE:
                      edge = cgraph_edge (id->dst_node, orig_stmt);
                      if (edge)
                        cgraph_set_call_stmt (edge, stmt);
                      break;

                    default:
                      gcc_unreachable ();
                    }
                }
              /* If you think we can abort here, you are wrong.
                 There is no region 0 in tree land.  */
              gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
                          != 0);

              if (tree_could_throw_p (stmt)
                  /* When we are cloning for inlining, we are supposed to
                     construct a clone that calls precisely the same functions
                     as the original.  However, IPA optimizers might've earlier
                     proved that some function calls are non-trapping, which
                     might render some basic blocks dead and thus unreachable.

                     We can't update SSA with unreachable blocks in the CFG,
                     so we prevent the scenario by preserving even the "dead"
                     eh edges until the point they are later removed by the
                     fixup_cfg pass.  */
                  || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                      && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
                {
                  int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
                  /* Add an entry for the copied tree in the EH hashtable.
                     When cloning or versioning, use the hashtable in
                     cfun, and just copy the EH number.  When inlining, use the
                     hashtable in the caller, and adjust the region number.  */
                  if (region > 0)
                    add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

                  /* If this tree doesn't have a region associated with it,
                     and there is a "current region,"
                     then associate this tree with the current region
                     and add edges associated with this region.  */
                  if ((lookup_stmt_eh_region_fn (id->src_cfun,
                                                 orig_stmt) <= 0
                       && id->eh_region > 0)
                      && tree_could_throw_p (stmt))
                    add_stmt_to_eh_region (stmt, id->eh_region);
                }
              if (gimple_in_ssa_p (cfun))
                {
                  ssa_op_iter i;
                  tree def;

                  find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
                  FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                    if (TREE_CODE (def) == SSA_NAME)
                      SSA_NAME_DEF_STMT (def) = stmt;
                }
              bsi_next (&copy_bsi);
            }
          copy_bsi = bsi_last (copy_basic_block);
        }
    }
  return copy_basic_block;
}

/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across the EH edges going from a basic block within the
   inlined function to a landing pad in the function we inline into.

   The function marks PHI_RESULT of such PHI nodes for renaming; this is
   safe because the EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI
   must be set.  This means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way we build the CFG pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_eh_edges (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        tree phi;

        gcc_assert (e->flags & EDGE_EH);
        for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
          {
            gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
                        (PHI_RESULT (phi)));
            mark_sym_for_renaming
              (SSA_NAME_VAR (PHI_RESULT (phi)));
          }
      }
}

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */
static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because the const
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */

      if (tree_can_throw_internal (copy_stmt))
        {
          if (!bsi_end_p (bsi))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              bsi = bsi_start (new_bb);
            }

          make_eh_edges (copy_stmt);

          if (gimple_in_ssa_p (cfun))
            update_ssa_across_eh_edges (bb_for_stmt (copy_stmt));
        }
    }
}

/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  tree phi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree res = PHI_RESULT (phi);
      tree new_res = res;
      tree new_phi;
      edge new_edge;

      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge old_edge = find_edge (new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;

              walk_tree (&new_arg, copy_body_r, id, NULL);
              gcc_assert (new_arg);
              add_phi_arg (new_phi, new_arg, new_edge);
            }
        }
    }
}

/* Wrapper for remap_decl so it can be used as a callback.  */
static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build a struct function and associated datastructures for the new clone
   NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
                 int frequency)
{
  struct function *new_cfun
    = (struct function *) ggc_alloc_cleared (sizeof (struct function));
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       /
                       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
  new_cfun->funcdef_no = get_next_funcdef_no ();
  VALUE_HISTOGRAMS (new_cfun) = NULL;
  new_cfun->unexpanded_var_list = NULL;
  new_cfun->cfg = NULL;
  new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
  DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
  push_cfun (new_cfun);
  init_empty_tree_cfg ();

  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa ();
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}

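/* A worked example of the profile scaling set up above (numbers
   invented): REG_BR_PROB_BASE is 10000, so if the callee's entry
   block has count 1000 and this clone is entered with COUNT == 250,
   then count_scale = 10000 * 250 / 1000 = 2500, and a copied block
   receives bb->count * 2500 / 10000, i.e. a quarter of its original
   count.  copy_bb and copy_edges_for_bb apply the same scale to the
   copied blocks and edges.  */
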
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  int count_scale, frequency_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       /
                       ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      id->eh_region_offset
        = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
                                0, id->eh_region);
    }
  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new;
      new->aux = bb;
    }

  last = last_basic_block;
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      ((basic_block)bb->aux)->aux = NULL;
      bb->aux = NULL;
    }
  /* Zero out AUX fields of blocks newly created during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    BASIC_BLOCK (last)->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  return new_fndecl;
}

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (copy_body_data *id)
{
  tree body;
  tree fndecl = id->src_fn;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}

static void
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we may not need to create a new variable here at all.  Instead, we
     may be able to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make the gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway, as it might be
     used in different SSA names when the parameter is set in the function.

     FIXME: This usually kills the last connection in between the inlined
     function parameter and the actual value in debug info.  Can we do
     better here?  If we just inserted the statement, copy propagation
     would kill it anyway as it always did in older versions of GCC.

     We might want to introduce a notion that a single SSA_NAME might
     represent multiple variables for purposes of debugging.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return;
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var_sub);
          return;
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = build_gimple_modify_stmt (def, rhs);
          SSA_NAME_DEF_STMT (def) = init_stmt;
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = build_gimple_modify_stmt (var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
         cast of a gimple value, then we will need to gimplify INIT_STMTS
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
      if ((!is_gimple_val (rhs)
           && (!is_gimple_cast (rhs)
               || !is_gimple_val (TREE_OPERAND (rhs, 0))))
          || !is_gimple_reg (var))
        {
          tree_stmt_iterator i;

          push_gimplify_context ();
          gimplify_stmt (&init_stmt);
          if (gimple_in_ssa_p (cfun)
              && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
            {
              /* The replacement can expose previously unreferenced
                 variables.  */
              for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
                find_new_referenced_vars (tsi_stmt_ptr (i));
            }
          pop_gimplify_context (NULL);
        }

      /* If VAR represents a zero-sized variable, it's possible that the
         assignment statement may result in no gimple statements.  */
      if (init_stmt)
        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
      if (gimple_in_ssa_p (cfun))
        for (;!bsi_end_p (bsi); bsi_next (&bsi))
          mark_symbols_for_renaming (bsi_stmt (bsi));
    }
}

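/* Two sketches of what setup_one_parameter does (names invented).
   Inlining `f (3)' where

     int f (const int p) { return p + 1; }

   needs no initialization statement at all: `p' is TREE_READONLY, not
   addressable, and 3 is a gimple invariant, so the parameter (or its
   SSA name, when in SSA form) is simply mapped to 3.  If the body
   instead assigned to `p', a fresh VAR_DECL `p.7' would be created,
   mapped in place of `p', and initialized with `p.7 = 3;' at the head
   of the inlined code.  */
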
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the CALL_EXPR EXP.  */

static void
initialize_inlined_parameters (copy_body_data *id, tree exp,
                               tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  call_expr_arg_iterator iter;
  tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = first_call_expr_arg (exp, &iter); p;
       a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
    setup_one_parameter (id, p, a, fn, bb, &vars);

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

e21aff8a
SB
1486/* Declare a return variable to replace the RESULT_DECL for the
1487 function we are calling. An appropriate DECL_STMT is returned.
1488 The USE_STMT is filled to contain a use of the declaration to
1489 indicate the return value of the function.
1490
110cfe1c
JH
1491 RETURN_SLOT, if non-null is place where to store the result. It
1492 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1493 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
7740f00d
RH
1494
1495 The return value is a (possibly null) value that is the result of the
1496 function as seen by the callee. *USE_P is a (possibly null) value that
1497 holds the result as seen by the caller. */
d4e4baa9 1498
d436bff8 1499static tree
110cfe1c
JH
1500declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1501 tree *use_p)
d4e4baa9 1502{
1b369fae
RH
1503 tree callee = id->src_fn;
1504 tree caller = id->dst_fn;
7740f00d
RH
1505 tree result = DECL_RESULT (callee);
1506 tree callee_type = TREE_TYPE (result);
1507 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1508 tree var, use;
d4e4baa9
AO
1509
1510 /* We don't need to do anything for functions that don't return
1511 anything. */
7740f00d 1512 if (!result || VOID_TYPE_P (callee_type))
d4e4baa9 1513 {
6de9cd9a 1514 *use_p = NULL_TREE;
d4e4baa9
AO
1515 return NULL_TREE;
1516 }
1517
cc77ae10 1518 /* If there was a return slot, then the return value is the
7740f00d 1519 dereferenced address of that object. */
110cfe1c 1520 if (return_slot)
7740f00d 1521 {
110cfe1c 1522 /* The front end shouldn't have used both return_slot and
7740f00d 1523 a modify expression. */
1e128c5f 1524 gcc_assert (!modify_dest);
cc77ae10 1525 if (DECL_BY_REFERENCE (result))
110cfe1c
JH
1526 {
1527 tree return_slot_addr = build_fold_addr_expr (return_slot);
1528 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1529
1530 /* We are going to construct *&return_slot and we can't do that
1531 for variables believed to be not addressable.
1532
1533 FIXME: This check possibly can match, because values returned
1534 via return slot optimization are not believed to have address
1535 taken by alias analysis. */
1536 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1537 if (gimple_in_ssa_p (cfun))
1538 {
1539 HOST_WIDE_INT bitsize;
1540 HOST_WIDE_INT bitpos;
1541 tree offset;
1542 enum machine_mode mode;
1543 int unsignedp;
1544 int volatilep;
1545 tree base;
1546 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1547 &offset,
1548 &mode, &unsignedp, &volatilep,
1549 false);
1550 if (TREE_CODE (base) == INDIRECT_REF)
1551 base = TREE_OPERAND (base, 0);
1552 if (TREE_CODE (base) == SSA_NAME)
1553 base = SSA_NAME_VAR (base);
1554 mark_sym_for_renaming (base);
1555 }
1556 var = return_slot_addr;
1557 }
cc77ae10 1558 else
110cfe1c
JH
1559 {
1560 var = return_slot;
1561 gcc_assert (TREE_CODE (var) != SSA_NAME);
1562 }
0890b981
AP
1563 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1564 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1565 && !DECL_GIMPLE_REG_P (result)
22918034 1566 && DECL_P (var))
0890b981 1567 DECL_GIMPLE_REG_P (var) = 0;
7740f00d
RH
1568 use = NULL;
1569 goto done;
1570 }
1571
1572 /* All types requiring non-trivial constructors should have been handled. */
1e128c5f 1573 gcc_assert (!TREE_ADDRESSABLE (callee_type));
7740f00d
RH
1574
1575 /* Attempt to avoid creating a new temporary variable. */
110cfe1c
JH
1576 if (modify_dest
1577 && TREE_CODE (modify_dest) != SSA_NAME)
7740f00d
RH
1578 {
1579 bool use_it = false;
1580
1581 /* We can't use MODIFY_DEST if there's type promotion involved. */
f4088621 1582 if (!useless_type_conversion_p (callee_type, caller_type))
7740f00d
RH
1583 use_it = false;
1584
1585 /* ??? If we're assigning to a variable sized type, then we must
1586 reuse the destination variable, because we've no good way to
1587 create variable sized temporaries at this point. */
1588 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1589 use_it = true;
1590
1591 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1592 reuse it as the result of the call directly. Don't do this if
1593 it would promote MODIFY_DEST to addressable. */
e2f9fe42
RH
1594 else if (TREE_ADDRESSABLE (result))
1595 use_it = false;
1596 else
1597 {
1598 tree base_m = get_base_address (modify_dest);
1599
1600 /* If the base isn't a decl, then it's a pointer, and we don't
1601 know where that's going to go. */
1602 if (!DECL_P (base_m))
1603 use_it = false;
1604 else if (is_global_var (base_m))
1605 use_it = false;
0890b981
AP
1606 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1607 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1608 && !DECL_GIMPLE_REG_P (result)
1609 && DECL_GIMPLE_REG_P (base_m))
1d327c16 1610 use_it = false;
e2f9fe42
RH
1611 else if (!TREE_ADDRESSABLE (base_m))
1612 use_it = true;
1613 }
7740f00d
RH
1614
1615 if (use_it)
1616 {
1617 var = modify_dest;
1618 use = NULL;
1619 goto done;
1620 }
1621 }
1622
1e128c5f 1623 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
7740f00d 1624
c08cd4c1 1625 var = copy_result_decl_to_var (result, id);
110cfe1c
JH
1626 if (gimple_in_ssa_p (cfun))
1627 {
1628 get_var_ann (var);
1629 add_referenced_var (var);
1630 }
e21aff8a 1631
7740f00d
RH
1632 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1633 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1634 = tree_cons (NULL_TREE, var,
1635 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1636
6de9cd9a 1637 /* Do not have the rest of GCC warn about this variable as it should
471854f8 1638 not be visible to the user. */
6de9cd9a 1639 TREE_NO_WARNING (var) = 1;
d4e4baa9 1640
c08cd4c1
JM
1641 declare_inline_vars (id->block, var);
1642
7740f00d
RH
1643 /* Build the use expr. If the return type of the function was
1644 promoted, convert it back to the expected type. */
1645 use = var;
f4088621 1646 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
7740f00d 1647 use = fold_convert (caller_type, var);
73dab33b
AP
1648
1649 STRIP_USELESS_TYPE_CONVERSION (use);
7740f00d 1650
c08cd4c1
JM
1651 if (DECL_BY_REFERENCE (result))
1652 var = build_fold_addr_expr (var);
1653
7740f00d 1654 done:
d4e4baa9
AO
1655 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1656 way, when the RESULT_DECL is encountered, it will be
1657 automatically replaced by the VAR_DECL. */
5e20bdd7 1658 insert_decl_map (id, result, var);
d4e4baa9 1659
6de9cd9a
DN
1660 /* Remember this so we can ignore it in remap_decls. */
1661 id->retvar = var;
1662
7740f00d
RH
1663 *use_p = use;
1664 return var;
d4e4baa9
AO
1665}
1666
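/* A hedged, source-level illustration (not used by GCC itself; all
   names here are hypothetical) of the return-slot case handled above:
   for a call like the one in example_return_slot, the callee's
   RESULT_DECL can be mapped straight onto the caller's object B, so
   no temporary is needed.  */
struct example_big { int a[64]; };

static struct example_big
example_make_big (void)
{
  struct example_big b = { { 0 } };
  return b;
}

static void ATTRIBUTE_UNUSED
example_return_slot (void)
{
  struct example_big b = example_make_big ();
  (void) b;
}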
0e9e1e0a 1667/* Returns nonzero if a function can be inlined as a tree. */
4838c5ee 1668
b3c3af2f
SB
1669bool
1670tree_inlinable_function_p (tree fn)
4838c5ee 1671{
b3c3af2f 1672 return inlinable_function_p (fn);
4838c5ee
AO
1673}
1674
f08545a8 1675static const char *inline_forbidden_reason;
c986baf6 1676
c986baf6 1677static tree
f08545a8 1678inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
edeb3871 1679 void *fnp)
c986baf6 1680{
f08545a8 1681 tree node = *nodep;
edeb3871 1682 tree fn = (tree) fnp;
f08545a8 1683 tree t;
c986baf6 1684
f08545a8
JH
1685 switch (TREE_CODE (node))
1686 {
1687 case CALL_EXPR:
3197c4fd
AS
1688 /* Refuse to inline an alloca call unless the user explicitly forced
1689 it, since this may drastically change the program's memory overhead
1690 when the function using alloca is called in a loop. In the GCC
1691 present in SPEC2000, inlining into schedule_block caused it to
1692 require 2GB of RAM instead of 256MB. */
f08545a8
JH
1693 if (alloca_call_p (node)
1694 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1695 {
ddd2d57e 1696 inline_forbidden_reason
dee15844 1697 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 1698 "alloca (override using the always_inline attribute)");
f08545a8
JH
1699 return node;
1700 }
1701 t = get_callee_fndecl (node);
1702 if (! t)
1703 break;
84f5e1b1 1704
f08545a8
JH
1705 /* We cannot inline functions that call setjmp. */
1706 if (setjmp_call_p (t))
1707 {
ddd2d57e 1708 inline_forbidden_reason
dee15844 1709 = G_("function %q+F can never be inlined because it uses setjmp");
f08545a8
JH
1710 return node;
1711 }
1712
6de9cd9a 1713 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 1714 switch (DECL_FUNCTION_CODE (t))
f08545a8 1715 {
3197c4fd
AS
1716 /* We cannot inline functions that take a variable number of
1717 arguments. */
1718 case BUILT_IN_VA_START:
1719 case BUILT_IN_STDARG_START:
1720 case BUILT_IN_NEXT_ARG:
1721 case BUILT_IN_VA_END:
6de9cd9a 1722 inline_forbidden_reason
dee15844 1723 = G_("function %q+F can never be inlined because it "
6de9cd9a
DN
1724 "uses variable argument lists");
1725 return node;
1726
3197c4fd 1727 case BUILT_IN_LONGJMP:
6de9cd9a
DN
1728 /* We can't inline functions that call __builtin_longjmp at
1729 all. The non-local goto machinery really requires the
1730 destination be in a different function. If we allow the
1731 function calling __builtin_longjmp to be inlined into the
1732 function calling __builtin_setjmp, Things will Go Awry. */
1733 inline_forbidden_reason
dee15844 1734 = G_("function %q+F can never be inlined because "
6de9cd9a
DN
1735 "it uses setjmp-longjmp exception handling");
1736 return node;
1737
1738 case BUILT_IN_NONLOCAL_GOTO:
1739 /* Similarly. */
1740 inline_forbidden_reason
dee15844 1741 = G_("function %q+F can never be inlined because "
6de9cd9a
DN
1742 "it uses non-local goto");
1743 return node;
f08545a8 1744
4b284111
JJ
1745 case BUILT_IN_RETURN:
1746 case BUILT_IN_APPLY_ARGS:
1747 /* If a __builtin_apply_args caller would be inlined,
1748 it would be saving arguments of the function it has
1749 been inlined into. Similarly __builtin_return would
1750 return from the function the call has been inlined into. */
1751 inline_forbidden_reason
dee15844 1752 = G_("function %q+F can never be inlined because "
4b284111
JJ
1753 "it uses __builtin_return or __builtin_apply_args");
1754 return node;
1755
3197c4fd
AS
1756 default:
1757 break;
1758 }
f08545a8
JH
1759 break;
1760
f08545a8
JH
1761 case GOTO_EXPR:
1762 t = TREE_OPERAND (node, 0);
1763
1764 /* We will not inline a function which uses computed goto. The
1765 addresses of its local labels, which may be tucked into
1766 global storage, are of course not constant across
1767 instantiations, which causes unexpected behavior. */
1768 if (TREE_CODE (t) != LABEL_DECL)
1769 {
ddd2d57e 1770 inline_forbidden_reason
dee15844 1771 = G_("function %q+F can never be inlined "
ddd2d57e 1772 "because it contains a computed goto");
f08545a8
JH
1773 return node;
1774 }
6de9cd9a 1775 break;
f08545a8 1776
6de9cd9a
DN
1777 case LABEL_EXPR:
1778 t = TREE_OPERAND (node, 0);
1779 if (DECL_NONLOCAL (t))
f08545a8 1780 {
6de9cd9a
DN
1781 /* We cannot inline a function that receives a non-local goto
1782 because we cannot remap the destination label used in the
1783 function that is performing the non-local goto. */
ddd2d57e 1784 inline_forbidden_reason
dee15844 1785 = G_("function %q+F can never be inlined "
6de9cd9a 1786 "because it receives a non-local goto");
ed397c43 1787 return node;
f08545a8 1788 }
f08545a8
JH
1789 break;
1790
1791 case RECORD_TYPE:
1792 case UNION_TYPE:
1793 /* We cannot inline a function of the form
1794
1795 void F (int i) { struct S { int ar[i]; } s; }
1796
1797 Attempting to do so produces a catch-22.
1798 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1799 UNION_TYPE nodes, then it goes into infinite recursion on a
1800 structure containing a pointer to its own type. If it doesn't,
1801 then the type node for S doesn't get adjusted properly when
0e61db61 1802 F is inlined.
27b892b4
RK
1803
1804 ??? This is likely no longer true, but it's too late in the 4.0
1805 cycle to try to find out. This should be checked for 4.1. */
f08545a8 1806 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
5377d5ba 1807 if (variably_modified_type_p (TREE_TYPE (t), NULL))
f08545a8 1808 {
ddd2d57e 1809 inline_forbidden_reason
dee15844 1810 = G_("function %q+F can never be inlined "
ddd2d57e 1811 "because it uses variable sized variables");
f08545a8
JH
1812 return node;
1813 }
6de9cd9a 1814
f08545a8
JH
1815 default:
1816 break;
1817 }
1818
1819 return NULL_TREE;
84f5e1b1
RH
1820}
1821
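/* Hedged, source-level illustrations (not part of GCC; the functions
   are hypothetical) of two constructs the walker above rejects:
   example_alloca is refused unless the user marks it always_inline,
   and example_computed_goto is always refused because the stored
   label addresses would not be valid across inlined copies.  */
static int ATTRIBUTE_UNUSED
example_alloca (int n)
{
  char *p = (char *) __builtin_alloca (n);
  p[0] = 0;
  return n;
}

static int ATTRIBUTE_UNUSED
example_computed_goto (int i)
{
  static void *labels[] = { &&l0, &&l1 };
  goto *labels[i & 1];
 l0:
  return 0;
 l1:
  return 1;
}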
f08545a8 1822/* Return a subexpression that forbids inlining FNDECL, if any; else NULL. */
84f5e1b1 1823static tree
f08545a8 1824inline_forbidden_p (tree fndecl)
84f5e1b1 1825{
070588f0 1826 location_t saved_loc = input_location;
e21aff8a
SB
1827 block_stmt_iterator bsi;
1828 basic_block bb;
1829 tree ret = NULL_TREE;
1830
1831 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1832 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1833 {
1834 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1835 inline_forbidden_p_1, fndecl);
1836 if (ret)
1837 goto egress;
1838 }
ed397c43 1839
e21aff8a 1840egress:
070588f0 1841 input_location = saved_loc;
d1a74aa7 1842 return ret;
84f5e1b1
RH
1843}
1844
b3c3af2f
SB
1845/* Returns nonzero if FN is a function that does not have any
1846 fundamental inline blocking properties. */
d4e4baa9 1847
b3c3af2f
SB
1848static bool
1849inlinable_function_p (tree fn)
d4e4baa9 1850{
b3c3af2f 1851 bool inlinable = true;
d4e4baa9
AO
1852
1853 /* If we've already decided this function shouldn't be inlined,
1854 there's no need to check again. */
1855 if (DECL_UNINLINABLE (fn))
b3c3af2f 1856 return false;
d4e4baa9 1857
d58b7c2d
MM
1858 /* See if there is any language-specific reason it cannot be
1859 inlined. (It is important that this hook be called early because
b3c3af2f
SB
1860 in C++ it may result in template instantiation.)
1861 If the function is not inlinable for language-specific reasons,
1862 it is left up to the langhook to explain why. */
ae2bcd98 1863 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
46c5ad27 1864
b3c3af2f
SB
1865 /* If we don't have the function body available, we can't inline it.
1866 However, this should not be recorded since we also get here for
1867 forward declared inline functions. Therefore, return at once. */
1868 if (!DECL_SAVED_TREE (fn))
1869 return false;
1870
1871 /* If we're not inlining at all, then we cannot inline this function. */
1872 else if (!flag_inline_trees)
1873 inlinable = false;
1874
1875 /* Only try to inline functions if DECL_INLINE is set. This should be
1876 true for all functions declared `inline', and for all other functions
1877 as well with -finline-functions.
1878
1879 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1880 it's the front-end that must set DECL_INLINE in this case, because
1881 dwarf2out loses if a function that does not have DECL_INLINE set is
1882 inlined anyway. That is why we have both DECL_INLINE and
1883 DECL_DECLARED_INLINE_P. */
1884 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1885 here should be redundant. */
1886 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1887 inlinable = false;
a0c8285b 1888
f08545a8 1889 else if (inline_forbidden_p (fn))
b3c3af2f
SB
1890 {
1891 /* See if we should warn about uninlinable functions. Previously,
1892 some of these warnings would be issued while trying to expand
1893 the function inline, but that would cause multiple warnings
1894 about functions that would for example call alloca. But since
1895 this is a property of the function, just one warning is enough.
1896 As a bonus we can now give more details about the reason why a
1897 function is not inlinable.
1898 We only warn for functions declared `inline' by the user. */
1899 bool do_warning = (warn_inline
1900 && DECL_INLINE (fn)
1901 && DECL_DECLARED_INLINE_P (fn)
1902 && !DECL_IN_SYSTEM_HEADER (fn));
1903
aa4a53af 1904 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
dee15844 1905 sorry (inline_forbidden_reason, fn);
2d327012 1906 else if (do_warning)
d2fcbf6f 1907 warning (OPT_Winline, inline_forbidden_reason, fn);
b3c3af2f
SB
1908
1909 inlinable = false;
1910 }
d4e4baa9
AO
1911
1912 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 1913 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 1914
b3c3af2f
SB
1915 return inlinable;
1916}
1917
e5c4f28a
RG
1918/* Estimate the cost of a memory move. Use a machine-dependent
1919 word size and take a possible memcpy call into account. */
1920
1921int
1922estimate_move_cost (tree type)
1923{
1924 HOST_WIDE_INT size;
1925
1926 size = int_size_in_bytes (type);
1927
1928 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1929 /* Cost of a memcpy call, 3 arguments and the call. */
1930 return 4;
1931 else
1932 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1933}
1934
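/* A hedged worked example, assuming hypothetical target values
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4: a 12-byte type costs
   (12 + 8 - 1) / 8 == 2 insns, while a 64-byte type exceeds
   8 * 4 == 32 bytes and is charged 4, the assumed cost of a memcpy
   call (three argument moves plus the call itself).  */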
7f9bc51b
ZD
1935/* Arguments for estimate_num_insns_1. */
1936
1937struct eni_data
1938{
1939 /* Used to return the number of insns. */
1940 int count;
1941
1942 /* Weights of various constructs. */
1943 eni_weights *weights;
1944};
1945
6de9cd9a
DN
1946/* Used by estimate_num_insns. Estimate number of instructions seen
1947 by given statement. */
aa4a53af 1948
6de9cd9a
DN
1949static tree
1950estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1951{
7f9bc51b 1952 struct eni_data *d = data;
6de9cd9a 1953 tree x = *tp;
7f9bc51b 1954 unsigned cost;
6de9cd9a 1955
6615c446 1956 if (IS_TYPE_OR_DECL_P (x))
6de9cd9a
DN
1957 {
1958 *walk_subtrees = 0;
1959 return NULL;
1960 }
1961 /* Assume that constants and references count as nothing. They should
1962 be dominated by the operations among them that we do count later,
1963 and they are a common target of CSE and similar optimizations. */
6615c446 1964 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
6de9cd9a 1965 return NULL;
ed397c43 1966
6de9cd9a 1967 switch (TREE_CODE (x))
9f63daea 1968 {
6de9cd9a
DN
1969 /* Containers have no cost. */
1970 case TREE_LIST:
1971 case TREE_VEC:
1972 case BLOCK:
1973 case COMPONENT_REF:
1974 case BIT_FIELD_REF:
1975 case INDIRECT_REF:
16630a2c
DN
1976 case ALIGN_INDIRECT_REF:
1977 case MISALIGNED_INDIRECT_REF:
6de9cd9a
DN
1978 case ARRAY_REF:
1979 case ARRAY_RANGE_REF:
0f59171d 1980 case OBJ_TYPE_REF:
6de9cd9a
DN
1981 case EXC_PTR_EXPR: /* ??? */
1982 case FILTER_EXPR: /* ??? */
1983 case COMPOUND_EXPR:
1984 case BIND_EXPR:
6de9cd9a
DN
1985 case WITH_CLEANUP_EXPR:
1986 case NOP_EXPR:
5cda34b1 1987 case CONVERT_EXPR:
6de9cd9a
DN
1988 case VIEW_CONVERT_EXPR:
1989 case SAVE_EXPR:
6de9cd9a 1990 case ADDR_EXPR:
6de9cd9a 1991 case COMPLEX_EXPR:
61fcaeec 1992 case RANGE_EXPR:
6de9cd9a
DN
1993 case CASE_LABEL_EXPR:
1994 case SSA_NAME:
1995 case CATCH_EXPR:
1996 case EH_FILTER_EXPR:
1997 case STATEMENT_LIST:
1998 case ERROR_MARK:
1999 case NON_LVALUE_EXPR:
6de9cd9a
DN
2000 case FDESC_EXPR:
2001 case VA_ARG_EXPR:
2002 case TRY_CATCH_EXPR:
2003 case TRY_FINALLY_EXPR:
2004 case LABEL_EXPR:
2005 case GOTO_EXPR:
2006 case RETURN_EXPR:
2007 case EXIT_EXPR:
2008 case LOOP_EXPR:
6de9cd9a 2009 case PHI_NODE:
d25cee4d 2010 case WITH_SIZE_EXPR:
aaf46ef9 2011 case OMP_CLAUSE:
777f7f9a
RH
2012 case OMP_RETURN:
2013 case OMP_CONTINUE:
e5c95afe 2014 case OMP_SECTIONS_SWITCH:
6de9cd9a 2015 break;
aa4a53af 2016
6de9cd9a
DN
2017 /* We don't account for constants for now. Assume that the cost is amortized
2018 by operations that do use them. We may re-consider this decision once
128a79fb 2019 we are able to optimize the tree before estimating its size and break
6de9cd9a
DN
2020 out static initializers. */
2021 case IDENTIFIER_NODE:
2022 case INTEGER_CST:
2023 case REAL_CST:
325217ed 2024 case FIXED_CST:
6de9cd9a
DN
2025 case COMPLEX_CST:
2026 case VECTOR_CST:
2027 case STRING_CST:
2028 *walk_subtrees = 0;
2029 return NULL;
3a5b9284 2030
058dcc25
ILT
2031 /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
2032 case CHANGE_DYNAMIC_TYPE_EXPR:
2033 *walk_subtrees = 0;
2034 return NULL;
2035
e5c4f28a
RG
2036 /* Try to estimate the cost of assignments. We have three cases to
2037 deal with:
2038 1) Simple assignments to registers;
2039 2) Stores to things that must live in memory. This includes
2040 "normal" stores to scalars, but also assignments of large
2041 structures, or constructors of big arrays;
2042 3) TARGET_EXPRs.
2043
2044 Let us look at the first two cases, assuming we have "a = b + C":
07beea0d
AH
2045 <GIMPLE_MODIFY_STMT <var_decl "a">
2046 <plus_expr <var_decl "b"> <constant C>>
e5c4f28a
RG
2047 If "a" is a GIMPLE register, the assignment to it is free on almost
2048 any target, because "a" usually ends up in a real register. Hence
2049 the only cost of this expression comes from the PLUS_EXPR, and we
07beea0d 2050 can ignore the GIMPLE_MODIFY_STMT.
e5c4f28a 2051 If "a" is not a GIMPLE register, the assignment to "a" will most
07beea0d 2052 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
e5c4f28a
RG
2053 of moving something into "a", which we compute using the function
2054 estimate_move_cost.
2055
2056 The third case deals with TARGET_EXPRs, for which the semantics are
2057 that a temporary is assigned, unless the TARGET_EXPR itself is being
2058 assigned to something else. In the latter case we do not need the
07beea0d
AH
2059 temporary. E.g. in:
2060 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2061 GIMPLE_MODIFY_STMT is free. */
6de9cd9a 2062 case INIT_EXPR:
07beea0d 2063 case GIMPLE_MODIFY_STMT:
e5c4f28a 2064 /* Is the right hand side a TARGET_EXPR? */
07beea0d 2065 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
e5c4f28a
RG
2066 break;
2067 /* ... fall through ... */
2068
3a5b9284 2069 case TARGET_EXPR:
07beea0d 2070 x = GENERIC_TREE_OPERAND (x, 0);
e5c4f28a
RG
2071 /* Is this an assignment to a register? */
2072 if (is_gimple_reg (x))
2073 break;
2074 /* Otherwise it's a store, so fall through to compute the move cost. */
e21aff8a 2075
6de9cd9a 2076 case CONSTRUCTOR:
7f9bc51b 2077 d->count += estimate_move_cost (TREE_TYPE (x));
6de9cd9a
DN
2078 break;
2079
e5c4f28a
RG
2080 /* Assign cost of 1 to usual operations.
2081 ??? We may consider mapping RTL costs to this. */
6de9cd9a 2082 case COND_EXPR:
4151978d 2083 case VEC_COND_EXPR:
6de9cd9a
DN
2084
2085 case PLUS_EXPR:
5be014d5 2086 case POINTER_PLUS_EXPR:
6de9cd9a
DN
2087 case MINUS_EXPR:
2088 case MULT_EXPR:
2089
325217ed 2090 case FIXED_CONVERT_EXPR:
6de9cd9a 2091 case FIX_TRUNC_EXPR:
6de9cd9a
DN
2092
2093 case NEGATE_EXPR:
2094 case FLOAT_EXPR:
2095 case MIN_EXPR:
2096 case MAX_EXPR:
2097 case ABS_EXPR:
2098
2099 case LSHIFT_EXPR:
2100 case RSHIFT_EXPR:
2101 case LROTATE_EXPR:
2102 case RROTATE_EXPR:
a6b46ba2
DN
2103 case VEC_LSHIFT_EXPR:
2104 case VEC_RSHIFT_EXPR:
6de9cd9a
DN
2105
2106 case BIT_IOR_EXPR:
2107 case BIT_XOR_EXPR:
2108 case BIT_AND_EXPR:
2109 case BIT_NOT_EXPR:
2110
2111 case TRUTH_ANDIF_EXPR:
2112 case TRUTH_ORIF_EXPR:
2113 case TRUTH_AND_EXPR:
2114 case TRUTH_OR_EXPR:
2115 case TRUTH_XOR_EXPR:
2116 case TRUTH_NOT_EXPR:
2117
2118 case LT_EXPR:
2119 case LE_EXPR:
2120 case GT_EXPR:
2121 case GE_EXPR:
2122 case EQ_EXPR:
2123 case NE_EXPR:
2124 case ORDERED_EXPR:
2125 case UNORDERED_EXPR:
2126
2127 case UNLT_EXPR:
2128 case UNLE_EXPR:
2129 case UNGT_EXPR:
2130 case UNGE_EXPR:
2131 case UNEQ_EXPR:
d1a7edaf 2132 case LTGT_EXPR:
6de9cd9a 2133
6de9cd9a
DN
2134 case CONJ_EXPR:
2135
2136 case PREDECREMENT_EXPR:
2137 case PREINCREMENT_EXPR:
2138 case POSTDECREMENT_EXPR:
2139 case POSTINCREMENT_EXPR:
2140
6de9cd9a
DN
2141 case ASM_EXPR:
2142
16630a2c
DN
2143 case REALIGN_LOAD_EXPR:
2144
61d3cdbb
DN
2145 case REDUC_MAX_EXPR:
2146 case REDUC_MIN_EXPR:
2147 case REDUC_PLUS_EXPR:
20f06221
DN
2148 case WIDEN_SUM_EXPR:
2149 case DOT_PROD_EXPR:
89d67cca
DN
2150 case VEC_WIDEN_MULT_HI_EXPR:
2151 case VEC_WIDEN_MULT_LO_EXPR:
2152 case VEC_UNPACK_HI_EXPR:
2153 case VEC_UNPACK_LO_EXPR:
d9987fb4
UB
2154 case VEC_UNPACK_FLOAT_HI_EXPR:
2155 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 2156 case VEC_PACK_TRUNC_EXPR:
89d67cca 2157 case VEC_PACK_SAT_EXPR:
d9987fb4 2158 case VEC_PACK_FIX_TRUNC_EXPR:
20f06221
DN
2159
2160 case WIDEN_MULT_EXPR:
61d3cdbb 2161
98b44b0e
IR
2162 case VEC_EXTRACT_EVEN_EXPR:
2163 case VEC_EXTRACT_ODD_EXPR:
2164 case VEC_INTERLEAVE_HIGH_EXPR:
2165 case VEC_INTERLEAVE_LOW_EXPR:
2166
6de9cd9a 2167 case RESX_EXPR:
7f9bc51b
ZD
2168 d->count += 1;
2169 break;
2170
2171 case SWITCH_EXPR:
2172 /* TODO: Cost of a switch should be derived from the number of
2173 branches. */
2174 d->count += d->weights->switch_cost;
6de9cd9a
DN
2175 break;
2176
1ea7e6ad 2177 /* A few special cases of expensive operations. This is useful
6de9cd9a
DN
2178 to avoid inlining functions that have too many of these. */
2179 case TRUNC_DIV_EXPR:
2180 case CEIL_DIV_EXPR:
2181 case FLOOR_DIV_EXPR:
2182 case ROUND_DIV_EXPR:
2183 case EXACT_DIV_EXPR:
2184 case TRUNC_MOD_EXPR:
2185 case CEIL_MOD_EXPR:
2186 case FLOOR_MOD_EXPR:
2187 case ROUND_MOD_EXPR:
2188 case RDIV_EXPR:
7f9bc51b 2189 d->count += d->weights->div_mod_cost;
6de9cd9a
DN
2190 break;
2191 case CALL_EXPR:
2192 {
2193 tree decl = get_callee_fndecl (x);
2194
7f9bc51b 2195 cost = d->weights->call_cost;
8c96cd51 2196 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
6de9cd9a
DN
2197 switch (DECL_FUNCTION_CODE (decl))
2198 {
2199 case BUILT_IN_CONSTANT_P:
2200 *walk_subtrees = 0;
2201 return NULL_TREE;
2202 case BUILT_IN_EXPECT:
2203 return NULL_TREE;
7f9bc51b
ZD
2204 /* Prefetch instruction is not expensive. */
2205 case BUILT_IN_PREFETCH:
2206 cost = 1;
2207 break;
6de9cd9a
DN
2208 default:
2209 break;
2210 }
e5c4f28a 2211
c7f599d0
JH
2212 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
2213 which does use the function declaration to figure out the arguments. */
2214 if (!decl)
2215 {
5039610b
SL
2216 tree a;
2217 call_expr_arg_iterator iter;
2218 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2219 d->count += estimate_move_cost (TREE_TYPE (a));
c7f599d0
JH
2220 }
2221 else
2222 {
5039610b 2223 tree arg;
c7f599d0 2224 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
7f9bc51b 2225 d->count += estimate_move_cost (TREE_TYPE (arg));
c7f599d0 2226 }
e5c4f28a 2227
7f9bc51b 2228 d->count += cost;
6de9cd9a
DN
2229 break;
2230 }
88f4034b
DN
2231
2232 case OMP_PARALLEL:
2233 case OMP_FOR:
2234 case OMP_SECTIONS:
2235 case OMP_SINGLE:
2236 case OMP_SECTION:
2237 case OMP_MASTER:
2238 case OMP_ORDERED:
2239 case OMP_CRITICAL:
2240 case OMP_ATOMIC:
2241 /* OpenMP directives are generally very expensive. */
7f9bc51b 2242 d->count += d->weights->omp_cost;
88f4034b
DN
2243 break;
2244
6de9cd9a 2245 default:
1e128c5f 2246 gcc_unreachable ();
6de9cd9a
DN
2247 }
2248 return NULL;
2249}
2250
7f9bc51b 2251/* Estimate number of instructions that will be created by expanding EXPR.
9f5ed61a 2252 WEIGHTS contains weights attributed to various constructs. */
aa4a53af 2253
6de9cd9a 2254int
7f9bc51b 2255estimate_num_insns (tree expr, eni_weights *weights)
6de9cd9a 2256{
e21aff8a
SB
2257 struct pointer_set_t *visited_nodes;
2258 basic_block bb;
2259 block_stmt_iterator bsi;
2260 struct function *my_function;
7f9bc51b
ZD
2261 struct eni_data data;
2262
2263 data.count = 0;
2264 data.weights = weights;
e21aff8a
SB
2265
2266 /* If we're given an entire function, walk the CFG. */
2267 if (TREE_CODE (expr) == FUNCTION_DECL)
2268 {
2269 my_function = DECL_STRUCT_FUNCTION (expr);
2270 gcc_assert (my_function && my_function->cfg);
2271 visited_nodes = pointer_set_create ();
2272 FOR_EACH_BB_FN (bb, my_function)
2273 {
2274 for (bsi = bsi_start (bb);
2275 !bsi_end_p (bsi);
2276 bsi_next (&bsi))
2277 {
2278 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
7f9bc51b 2279 &data, visited_nodes);
e21aff8a
SB
2280 }
2281 }
2282 pointer_set_destroy (visited_nodes);
2283 }
2284 else
7f9bc51b 2285 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
e21aff8a 2286
7f9bc51b
ZD
2287 return data.count;
2288}
2289
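/* A hedged usage sketch: callers typically estimate a whole function
   body (one that already has a CFG) against one of the global weight
   sets initialized by init_inline_once below, e.g. for size.  */
static int ATTRIBUTE_UNUSED
example_estimate_size (tree fndecl)
{
  return estimate_num_insns (fndecl, &eni_size_weights);
}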
2290/* Initializes weights used by estimate_num_insns. */
2291
2292void
2293init_inline_once (void)
2294{
2295 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2296 eni_inlining_weights.div_mod_cost = 10;
2297 eni_inlining_weights.switch_cost = 1;
2298 eni_inlining_weights.omp_cost = 40;
2299
2300 eni_size_weights.call_cost = 1;
2301 eni_size_weights.div_mod_cost = 1;
2302 eni_size_weights.switch_cost = 10;
2303 eni_size_weights.omp_cost = 40;
2304
2305 /* Estimating time for call is difficult, since we have no idea what the
2306 called function does. In the current uses of eni_time_weights,
2307 underestimating the cost does less harm than overestimating it, so
ea2c620c 2308 we choose a rather small value here. */
7f9bc51b
ZD
2309 eni_time_weights.call_cost = 10;
2310 eni_time_weights.div_mod_cost = 10;
2311 eni_time_weights.switch_cost = 4;
2312 eni_time_weights.omp_cost = 40;
6de9cd9a
DN
2313}
2314
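/* A hedged reading of the weights above: under eni_size_weights a
   division costs 1 and a switch 10, so size estimates penalize
   switches; under eni_time_weights a division costs 10 and a switch
   only 4, so time estimates penalize the division instead.  */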
426357ea
KH
2315typedef struct function *function_p;
2316
2317DEF_VEC_P(function_p);
2318DEF_VEC_ALLOC_P(function_p,heap);
2319
e21aff8a 2320/* Initialized with NOGC, making this poisonous to the garbage collector. */
426357ea 2321static VEC(function_p,heap) *cfun_stack;
e21aff8a
SB
2322
2323void
2324push_cfun (struct function *new_cfun)
2325{
426357ea 2326 VEC_safe_push (function_p, heap, cfun_stack, cfun);
e21aff8a
SB
2327 cfun = new_cfun;
2328}
2329
2330void
2331pop_cfun (void)
2332{
426357ea 2333 cfun = VEC_pop (function_p, cfun_stack);
e21aff8a
SB
2334}
2335
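/* A hedged usage sketch: code that must operate on another function
   temporarily installs it as `cfun' in strictly nested fashion;
   OTHER is a hypothetical function being visited.  */
static void ATTRIBUTE_UNUSED
example_visit_function (struct function *other)
{
  push_cfun (other);
  /* ... inspect or modify OTHER through `cfun' here ... */
  pop_cfun ();
}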
2336/* Install new lexical TREE_BLOCK underneath 'current_block'. */
2337static void
2338add_lexical_block (tree current_block, tree new_block)
2339{
2340 tree *blk_p;
2341
2342 /* Walk to the last sub-block. */
2343 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2344 *blk_p;
2345 blk_p = &TREE_CHAIN (*blk_p))
2346 ;
2347 *blk_p = new_block;
2348 BLOCK_SUPERCONTEXT (new_block) = current_block;
e21aff8a
SB
2349}
2350
d4e4baa9
AO
2351/* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2352
e21aff8a
SB
2353static bool
2354expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
d4e4baa9 2355{
1b369fae 2356 copy_body_data *id;
d4e4baa9 2357 tree t;
6de9cd9a 2358 tree use_retvar;
d436bff8 2359 tree fn;
6be42dd4 2360 struct pointer_map_t *st;
110cfe1c 2361 tree return_slot;
7740f00d 2362 tree modify_dest;
6de9cd9a 2363 location_t saved_location;
e21aff8a 2364 struct cgraph_edge *cg_edge;
dc0bfe6a 2365 const char *reason;
e21aff8a
SB
2366 basic_block return_block;
2367 edge e;
2368 block_stmt_iterator bsi, stmt_bsi;
2369 bool successfully_inlined = FALSE;
4f6c2131 2370 bool purge_dead_abnormal_edges;
e21aff8a
SB
2371 tree t_step;
2372 tree var;
d4e4baa9
AO
2373
2374 /* See what we've got. */
1b369fae 2375 id = (copy_body_data *) data;
d4e4baa9
AO
2376 t = *tp;
2377
6de9cd9a
DN
2378 /* Set input_location here so we get the right instantiation context
2379 if we call instantiate_decl from inlinable_function_p. */
2380 saved_location = input_location;
2381 if (EXPR_HAS_LOCATION (t))
2382 input_location = EXPR_LOCATION (t);
2383
d4e4baa9
AO
2384 /* From here on, we're only interested in CALL_EXPRs. */
2385 if (TREE_CODE (t) != CALL_EXPR)
6de9cd9a 2386 goto egress;
d4e4baa9
AO
2387
2388 /* First, see if we can figure out what function is being called.
2389 If we cannot, then there is no hope of inlining the function. */
2390 fn = get_callee_fndecl (t);
2391 if (!fn)
6de9cd9a 2392 goto egress;
d4e4baa9 2393
b58b1157 2394 /* Turn forward declarations into real ones. */
d4d1ebc1 2395 fn = cgraph_node (fn)->decl;
b58b1157 2396
a1a0fd4e
AO
2397 /* If fn is a declaration of a function in a nested scope that was
2398 globally declared inline, we don't set its DECL_INITIAL.
2399 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2400 C++ front-end uses it for cdtors to refer to their internal
2401 declarations, that are not real functions. Fortunately those
2402 don't have trees to be saved, so we can tell by checking their
2403 DECL_SAVED_TREE. */
2404 if (! DECL_INITIAL (fn)
2405 && DECL_ABSTRACT_ORIGIN (fn)
2406 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2407 fn = DECL_ABSTRACT_ORIGIN (fn);
2408
18c6ada9
JH
2409 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2410 Kill this check once this is fixed. */
1b369fae 2411 if (!id->dst_node->analyzed)
6de9cd9a 2412 goto egress;
18c6ada9 2413
1b369fae 2414 cg_edge = cgraph_edge (id->dst_node, stmt);
18c6ada9
JH
2415
2416 /* Constant propagation on argument done during previous inlining
2417 may create new direct call. Produce an edge for it. */
e21aff8a 2418 if (!cg_edge)
18c6ada9
JH
2419 {
2420 struct cgraph_node *dest = cgraph_node (fn);
2421
6de9cd9a
DN
2422 /* We have a missing edge in the callgraph. This can happen when
2423 previous inlining turned an indirect call into a direct call by
2424 constant propagating arguments. In all other cases we hit a bug
2425 (incorrect node sharing is the most common reason for missing edges). */
70f3cc30 2426 gcc_assert (dest->needed || !flag_unit_at_a_time);
1b369fae 2427 cgraph_create_edge (id->dst_node, dest, stmt,
45a80bb9
JH
2428 bb->count, CGRAPH_FREQ_BASE,
2429 bb->loop_depth)->inline_failed
18c6ada9 2430 = N_("originally indirect function call not considered for inlining");
45a80bb9
JH
2431 if (dump_file)
2432 {
2433 fprintf (dump_file, "Created new direct edge to %s",
2434 cgraph_node_name (dest));
2435 }
6de9cd9a 2436 goto egress;
18c6ada9
JH
2437 }
2438
d4e4baa9
AO
2439 /* Don't try to inline functions that are not well-suited to
2440 inlining. */
e21aff8a 2441 if (!cgraph_inline_p (cg_edge, &reason))
a833faa5 2442 {
7fac66d4
JH
2443 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2444 /* Avoid warnings during early inline pass. */
2445 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2d327012 2446 {
dee15844 2447 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2d327012
JH
2448 sorry ("called from here");
2449 }
2450 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2451 && !DECL_IN_SYSTEM_HEADER (fn)
09ebcffa 2452 && strlen (reason)
d63db217
JH
2453 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2454 /* Avoid warnings during early inline pass. */
2455 && (!flag_unit_at_a_time || cgraph_global_info_ready))
a833faa5 2456 {
dee15844
JM
2457 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2458 fn, reason);
3176a0c2 2459 warning (OPT_Winline, "called from here");
a833faa5 2460 }
6de9cd9a 2461 goto egress;
a833faa5 2462 }
ea99e0be 2463 fn = cg_edge->callee->decl;
d4e4baa9 2464
18c6ada9 2465#ifdef ENABLE_CHECKING
1b369fae 2466 if (cg_edge->callee->decl != id->dst_node->decl)
e21aff8a 2467 verify_cgraph_node (cg_edge->callee);
18c6ada9
JH
2468#endif
2469
e21aff8a 2470 /* We will be inlining this callee. */
e21aff8a
SB
2471 id->eh_region = lookup_stmt_eh_region (stmt);
2472
2473 /* Split the block holding the CALL_EXPR. */
e21aff8a
SB
2474 e = split_block (bb, stmt);
2475 bb = e->src;
2476 return_block = e->dest;
2477 remove_edge (e);
2478
4f6c2131
EB
2479 /* split_block splits after the statement; work around this by
2480 moving the call into the second block manually. Not pretty,
2481 but seems easier than doing the CFG manipulation by hand
2482 when the CALL_EXPR is in the last statement of BB. */
e21aff8a 2483 stmt_bsi = bsi_last (bb);
4f6c2131
EB
2484 bsi_remove (&stmt_bsi, false);
2485
2486 /* If the CALL_EXPR was in the last statement of BB, it may have
2487 been the source of abnormal edges. In this case, schedule
2488 the removal of dead abnormal edges. */
e21aff8a 2489 bsi = bsi_start (return_block);
4f6c2131 2490 if (bsi_end_p (bsi))
e21aff8a 2491 {
e21aff8a 2492 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
4f6c2131 2493 purge_dead_abnormal_edges = true;
e21aff8a 2494 }
4f6c2131
EB
2495 else
2496 {
2497 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2498 purge_dead_abnormal_edges = false;
2499 }
2500
e21aff8a 2501 stmt_bsi = bsi_start (return_block);
742a37d5 2502
d436bff8
AH
2503 /* Build a block containing code to initialize the arguments, the
2504 actual inline expansion of the body, and a label for the return
2505 statements within the function to jump to. The type of the
2506 statement expression is the return type of the function call. */
e21aff8a
SB
2507 id->block = make_node (BLOCK);
2508 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3e2844cb 2509 BLOCK_SOURCE_LOCATION (id->block) = input_location;
e21aff8a
SB
2510 add_lexical_block (TREE_BLOCK (stmt), id->block);
2511
d4e4baa9
AO
2512 /* Local declarations will be replaced by their equivalents in this
2513 map. */
2514 st = id->decl_map;
6be42dd4 2515 id->decl_map = pointer_map_create ();
d4e4baa9 2516
e21aff8a 2517 /* Record the function we are about to inline. */
1b369fae
RH
2518 id->src_fn = fn;
2519 id->src_node = cg_edge->callee;
110cfe1c 2520 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
1b369fae 2521
5039610b 2522 initialize_inlined_parameters (id, t, fn, bb);
d4e4baa9 2523
ea99e0be 2524 if (DECL_INITIAL (fn))
acb8f212
JH
2525 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2526
d4e4baa9
AO
2527 /* Return statements in the function body will be replaced by jumps
2528 to the RET_LABEL. */
d4e4baa9 2529
1e128c5f
GB
2530 gcc_assert (DECL_INITIAL (fn));
2531 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 2532
7740f00d 2533 /* Find the lhs to which the result of this call is assigned. */
110cfe1c 2534 return_slot = NULL;
07beea0d 2535 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
81bafd36 2536 {
07beea0d 2537 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
81bafd36
ILT
2538
2539 /* The function which we are inlining might not return a value,
2540 in which case we should issue a warning that the function
2541 does not return a value. In that case the optimizers will
2542 see that the variable to which the value is assigned was not
2543 initialized. We do not want to issue a warning about that
2544 uninitialized variable. */
2545 if (DECL_P (modify_dest))
2546 TREE_NO_WARNING (modify_dest) = 1;
fa47911c
JM
2547 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2548 {
110cfe1c 2549 return_slot = modify_dest;
fa47911c
JM
2550 modify_dest = NULL;
2551 }
81bafd36 2552 }
7740f00d
RH
2553 else
2554 modify_dest = NULL;
2555
d4e4baa9 2556 /* Declare the return variable for the function. */
110cfe1c 2557 declare_return_variable (id, return_slot,
c08cd4c1 2558 modify_dest, &use_retvar);
d4e4baa9 2559
e21aff8a
SB
2560 /* This is it. Duplicate the callee body. Assume callee is
2561 pre-gimplified. Note that we must not alter the caller
2562 function in any way before this point, as this CALL_EXPR may be
2563 a self-referential call; if we're calling ourselves, we need to
2564 duplicate our body before altering anything. */
2565 copy_body (id, bb->count, bb->frequency, bb, return_block);
50aadcbc 2566
acb8f212 2567 /* Add local vars in this inlined callee to caller. */
1b369fae 2568 t_step = id->src_cfun->unexpanded_var_list;
acb8f212
JH
2569 for (; t_step; t_step = TREE_CHAIN (t_step))
2570 {
2571 var = TREE_VALUE (t_step);
2572 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2573 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2574 cfun->unexpanded_var_list);
2575 else
2576 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2577 cfun->unexpanded_var_list);
2578 }
2579
d4e4baa9 2580 /* Clean up. */
6be42dd4 2581 pointer_map_destroy (id->decl_map);
d4e4baa9
AO
2582 id->decl_map = st;
2583
84936f6f 2584 /* If the inlined function returns a result that we care about,
e21aff8a
SB
2585 clobber the CALL_EXPR with a reference to the return variable. */
2586 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2587 {
2588 *tp = use_retvar;
110cfe1c
JH
2589 if (gimple_in_ssa_p (cfun))
2590 {
2591 update_stmt (stmt);
2592 mark_symbols_for_renaming (stmt);
2593 }
e21aff8a
SB
2594 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2595 }
6de9cd9a 2596 else
e21aff8a
SB
2597 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2598 tsi_delink() will leave the iterator in a sane state. */
110cfe1c
JH
2599 {
2600 /* Handle the case of inlining a function that is missing a return
2601 statement, so its return value becomes undefined. */
2602 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2603 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2604 {
2605 tree name = TREE_OPERAND (stmt, 0);
2606 tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
2607 tree def = gimple_default_def (cfun, var);
2608
2609 /* If the variable is used undefined, make this name undefined via
2610 move. */
2611 if (def)
2612 {
2613 TREE_OPERAND (stmt, 1) = def;
2614 update_stmt (stmt);
2615 }
2616 /* Otherwise make this variable undefined. */
2617 else
2618 {
2619 bsi_remove (&stmt_bsi, true);
2620 set_default_def (var, name);
2621 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2622 }
2623 }
2624 else
2625 bsi_remove (&stmt_bsi, true);
2626 }
d4e4baa9 2627
4f6c2131
EB
2628 if (purge_dead_abnormal_edges)
2629 tree_purge_dead_abnormal_call_edges (return_block);
84936f6f 2630
e21aff8a
SB
2631 /* If the value of the new expression is ignored, that's OK. We
2632 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2633 the equivalent inlined version either. */
2634 TREE_USED (*tp) = 1;
84936f6f 2635
1eb3331e
DB
2636 /* Output the inlining info for this abstract function, since it has been
2637 inlined. If we don't do this now, we can lose the information about the
2638 variables in the function when the blocks get blown away as soon as we
2639 remove the cgraph node. */
e21aff8a 2640 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 2641
e72fcfe8 2642 /* Update callgraph if needed. */
e21aff8a 2643 cgraph_remove_node (cg_edge->callee);
e72fcfe8 2644
e21aff8a 2645 id->block = NULL_TREE;
e21aff8a 2646 successfully_inlined = TRUE;
742a37d5 2647
6de9cd9a
DN
2648 egress:
2649 input_location = saved_location;
e21aff8a 2650 return successfully_inlined;
d4e4baa9 2651}
6de9cd9a 2652
e21aff8a
SB
2653/* Expand call statements reachable from STMT_P.
2654 We can only have CALL_EXPRs as the "toplevel" tree code or nested
07beea0d 2655 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in(). We
e21aff8a
SB
2656 unfortunately cannot use that function here because we need a pointer
2657 to the CALL_EXPR, not the tree itself. */
2658
2659static bool
1b369fae 2660gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 2661{
e21aff8a 2662 block_stmt_iterator bsi;
6de9cd9a 2663
e21aff8a
SB
2664 /* Register specific tree functions. */
2665 tree_register_cfg_hooks ();
2666 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
6de9cd9a 2667 {
e21aff8a
SB
2668 tree *expr_p = bsi_stmt_ptr (bsi);
2669 tree stmt = *expr_p;
2670
07beea0d
AH
2671 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2672 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
e21aff8a
SB
2673 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2674 expr_p = &TREE_OPERAND (*expr_p, 0);
2675 if (TREE_CODE (*expr_p) == CALL_EXPR)
2676 if (expand_call_inline (bb, stmt, expr_p, id))
2677 return true;
6de9cd9a 2678 }
e21aff8a 2679 return false;
6de9cd9a
DN
2680}
2681
b8a00a4d
JH
2682/* Walk all basic blocks created after FIRST and try to fold every statement
2683 in the STATEMENTS pointer set. */
2684static void
2685fold_marked_statements (int first, struct pointer_set_t *statements)
2686{
2687 for (;first < n_basic_blocks;first++)
2688 if (BASIC_BLOCK (first))
2689 {
2690 block_stmt_iterator bsi;
2691 for (bsi = bsi_start (BASIC_BLOCK (first));
2692 !bsi_end_p (bsi); bsi_next (&bsi))
2693 if (pointer_set_contains (statements, bsi_stmt (bsi)))
9477eb38
JH
2694 {
2695 tree old_stmt = bsi_stmt (bsi);
2696 if (fold_stmt (bsi_stmt_ptr (bsi)))
2697 {
2698 update_stmt (bsi_stmt (bsi));
2699 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2700 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2701 }
2702 }
b8a00a4d
JH
2703 }
2704}
2705
1084e689
JH
2706/* Return true if BB has at least one abnormal outgoing edge. */
2707
2708static inline bool
2709has_abnormal_outgoing_edge_p (basic_block bb)
2710{
2711 edge e;
2712 edge_iterator ei;
2713
2714 FOR_EACH_EDGE (e, ei, bb->succs)
2715 if (e->flags & EDGE_ABNORMAL)
2716 return true;
2717
2718 return false;
2719}
2720
2721/* When a block from the inlined function containing a call with side-effects
2722 in the middle gets inlined into a function with non-local labels, the call
2723 becomes a potential non-local goto, so we need to add the appropriate edge. */
2724
2725static void
2726make_nonlocal_label_edges (void)
2727{
2728 block_stmt_iterator bsi;
2729 basic_block bb;
2730
2731 FOR_EACH_BB (bb)
2732 {
2733 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2734 {
2735 tree stmt = bsi_stmt (bsi);
2736 if (tree_can_make_abnormal_goto (stmt))
2737 {
2738 if (stmt == bsi_stmt (bsi_last (bb)))
2739 {
2740 if (!has_abnormal_outgoing_edge_p (bb))
2741 make_abnormal_goto_edges (bb, true);
2742 }
2743 else
2744 {
2745 edge e = split_block (bb, stmt);
2746 bb = e->src;
2747 make_abnormal_goto_edges (bb, true);
2748 }
2749 break;
2750 }
2751
2752 /* Update PHIs on nonlocal goto receivers we (possibly)
2753 just created new edges into. */
2754 if (TREE_CODE (stmt) == LABEL_EXPR
2755 && gimple_in_ssa_p (cfun))
2756 {
2757 tree target = LABEL_EXPR_LABEL (stmt);
2758 if (DECL_NONLOCAL (target))
2759 {
2760 tree phi;
2761
2762 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2763 {
2764 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
2765 (PHI_RESULT (phi)));
2766 mark_sym_for_renaming
2767 (SSA_NAME_VAR (PHI_RESULT (phi)));
2768 }
2769 }
2770 }
2771 }
2772 }
2773}
2774
d4e4baa9
AO
2775/* Expand calls to inline functions in the body of FN. */
2776
873aa8f5 2777unsigned int
46c5ad27 2778optimize_inline_calls (tree fn)
d4e4baa9 2779{
1b369fae 2780 copy_body_data id;
d4e4baa9 2781 tree prev_fn;
e21aff8a 2782 basic_block bb;
b8a00a4d 2783 int last = n_basic_blocks;
c5b6f18e
MM
2784 /* There is no point in performing inlining if errors have already
2785 occurred -- and we might crash if we try to inline invalid
2786 code. */
2787 if (errorcount || sorrycount)
873aa8f5 2788 return 0;
c5b6f18e 2789
d4e4baa9
AO
2790 /* Clear out ID. */
2791 memset (&id, 0, sizeof (id));
2792
1b369fae
RH
2793 id.src_node = id.dst_node = cgraph_node (fn);
2794 id.dst_fn = fn;
d4e4baa9
AO
2795 /* Or any functions that aren't finished yet. */
2796 prev_fn = NULL_TREE;
2797 if (current_function_decl)
2798 {
1b369fae 2799 id.dst_fn = current_function_decl;
d4e4baa9
AO
2800 prev_fn = current_function_decl;
2801 }
1b369fae
RH
2802
2803 id.copy_decl = copy_decl_maybe_to_var;
2804 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2805 id.transform_new_cfg = false;
2806 id.transform_return_to_modify = true;
2807 id.transform_lang_insert_block = false;
b8a00a4d 2808 id.statements_to_fold = pointer_set_create ();
1b369fae 2809
e21aff8a 2810 push_gimplify_context ();
d4e4baa9 2811
672987e8
ZD
2812 /* We make no attempts to keep dominance info up-to-date. */
2813 free_dominance_info (CDI_DOMINATORS);
2814 free_dominance_info (CDI_POST_DOMINATORS);
2815
e21aff8a
SB
2816 /* Reach the trees by walking over the CFG, and note the
2817 enclosing basic-blocks in the call edges. */
2818 /* We walk the blocks going forward, because inlined function bodies
2819 will split id->current_basic_block, and the new blocks will
2820 follow it; we'll trudge through them, processing their CALL_EXPRs
2821 along the way. */
2822 FOR_EACH_BB (bb)
2823 gimple_expand_calls_inline (bb, &id);
d4e4baa9 2824
e21aff8a 2825 pop_gimplify_context (NULL);
6de9cd9a 2826
18c6ada9
JH
2827#ifdef ENABLE_CHECKING
2828 {
2829 struct cgraph_edge *e;
2830
1b369fae 2831 verify_cgraph_node (id.dst_node);
18c6ada9
JH
2832
2833 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 2834 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 2835 gcc_assert (e->inline_failed);
18c6ada9
JH
2836 }
2837#endif
a9eafe81
AP
2838
2839 /* Fold the statements before compacting/renumbering the basic blocks. */
2840 fold_marked_statements (last, id.statements_to_fold);
2841 pointer_set_destroy (id.statements_to_fold);
2842
2843 /* Renumber the (code) basic_blocks consecutively. */
2844 compact_blocks ();
2845 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2846 number_blocks (fn);
b8a00a4d 2847
873aa8f5
JH
2848 /* We are not going to maintain the cgraph edges up to date.
2849 Kill them so they won't confuse us. */
2850 cgraph_node_remove_callees (id.dst_node);
2851
873aa8f5 2852 fold_cond_expr_cond ();
1084e689
JH
2853 if (current_function_has_nonlocal_label)
2854 make_nonlocal_label_edges ();
110cfe1c
JH
2855 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2856 not possible yet - the IPA passes might mark various functions as not
2857 throwing, and they don't proactively update local EH info. This is
2858 done later in the fixup_cfg pass, which also executes the verification. */
873aa8f5 2859 return (TODO_update_ssa | TODO_cleanup_cfg
45a80bb9
JH
2860 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2861 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
d4e4baa9
AO
2862}
2863
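/* A hedged sketch (the function name is hypothetical, not a real GCC
   pass): a per-function pass would drive the inliner like this and
   hand the returned TODO flags back to the pass manager.  */
static unsigned int ATTRIBUTE_UNUSED
example_execute_inlining (void)
{
  return optimize_inline_calls (current_function_decl);
}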
aa4a53af
RK
2864/* FN is a function that has a complete body, and CLONE is a function whose
2865 body is to be set to a copy of FN, mapping argument declarations according
2866 to the ARG_MAP pointer map. */
d4e4baa9
AO
2867
2868void
46c5ad27 2869clone_body (tree clone, tree fn, void *arg_map)
d4e4baa9 2870{
1b369fae 2871 copy_body_data id;
d4e4baa9 2872
aa4a53af 2873 /* Clone the body, as if we were making an inline call. But, remap the
e21aff8a 2874 parameters in the callee to the parameters of caller. */
d4e4baa9 2875 memset (&id, 0, sizeof (id));
1b369fae
RH
2876 id.src_fn = fn;
2877 id.dst_fn = clone;
2878 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6be42dd4 2879 id.decl_map = (struct pointer_map_t *)arg_map;
d4e4baa9 2880
1b369fae
RH
2881 id.copy_decl = copy_decl_no_change;
2882 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2883 id.transform_new_cfg = true;
2884 id.transform_return_to_modify = false;
2885 id.transform_lang_insert_block = true;
d4e4baa9 2886
e21aff8a
SB
2887 /* We're not inside any EH region. */
2888 id.eh_region = -1;
2889
d4e4baa9 2890 /* Actually copy the body. */
e21aff8a 2891 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
d4e4baa9
AO
2892}
2893
d4e4baa9
AO
2894/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2895
2896tree
46c5ad27 2897copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
d4e4baa9
AO
2898{
2899 enum tree_code code = TREE_CODE (*tp);
07beea0d 2900 enum tree_code_class cl = TREE_CODE_CLASS (code);
d4e4baa9
AO
2901
2902 /* We make copies of most nodes. */
07beea0d
AH
2903 if (IS_EXPR_CODE_CLASS (cl)
2904 || IS_GIMPLE_STMT_CODE_CLASS (cl)
d4e4baa9
AO
2905 || code == TREE_LIST
2906 || code == TREE_VEC
8843c120
DN
2907 || code == TYPE_DECL
2908 || code == OMP_CLAUSE)
d4e4baa9
AO
2909 {
2910 /* Because the chain gets clobbered when we make a copy, we save it
2911 here. */
07beea0d
AH
2912 tree chain = NULL_TREE, new;
2913
2914 if (!GIMPLE_TUPLE_P (*tp))
2915 chain = TREE_CHAIN (*tp);
d4e4baa9
AO
2916
2917 /* Copy the node. */
6de9cd9a
DN
2918 new = copy_node (*tp);
2919
2920 /* Propagate mudflap marked-ness. */
2921 if (flag_mudflap && mf_marked_p (*tp))
2922 mf_mark (new);
2923
2924 *tp = new;
d4e4baa9
AO
2925
2926 /* Now, restore the chain, if appropriate. That will cause
2927 walk_tree to walk into the chain as well. */
50674e96
DN
2928 if (code == PARM_DECL
2929 || code == TREE_LIST
aaf46ef9 2930 || code == OMP_CLAUSE)
d4e4baa9
AO
2931 TREE_CHAIN (*tp) = chain;
2932
2933 /* For now, we don't update BLOCKs when we make copies. So, we
6de9cd9a
DN
2934 have to nullify all BIND_EXPRs. */
2935 if (TREE_CODE (*tp) == BIND_EXPR)
2936 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 2937 }
4038c495
GB
2938 else if (code == CONSTRUCTOR)
2939 {
2940 /* CONSTRUCTOR nodes need special handling because
2941 we need to duplicate the vector of elements. */
2942 tree new;
2943
2944 new = copy_node (*tp);
2945
2946 /* Propagate mudflap marked-ness. */
2947 if (flag_mudflap && mf_marked_p (*tp))
2948 mf_mark (new);
9f63daea 2949
4038c495
GB
2950 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2951 CONSTRUCTOR_ELTS (*tp));
2952 *tp = new;
2953 }
6615c446 2954 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 2955 *walk_subtrees = 0;
6615c446 2956 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 2957 *walk_subtrees = 0;
a396f8ae
GK
2958 else if (TREE_CODE_CLASS (code) == tcc_constant)
2959 *walk_subtrees = 0;
1e128c5f
GB
2960 else
2961 gcc_assert (code != STATEMENT_LIST);
d4e4baa9
AO
2962 return NULL_TREE;
2963}
2964
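/* A hedged usage sketch: a deep copy of an expression is made by
   walking it with copy_tree_r, e.g.

     tree t = expr;
     walk_tree (&t, copy_tree_r, NULL, NULL);

   after which T shares no expression nodes with EXPR, though
   declarations, types and constants remain shared.  */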
2965/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
aa4a53af 2966 information indicating to what new SAVE_EXPR this one should be mapped,
e21aff8a
SB
2967 use that one. Otherwise, create a new node and enter it in ST. FN is
2968 the function into which the copy will be placed. */
d4e4baa9 2969
892c7e1e 2970static void
82c82743 2971remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
d4e4baa9 2972{
6be42dd4
RG
2973 struct pointer_map_t *st = (struct pointer_map_t *) st_;
2974 tree *n;
5e20bdd7 2975 tree t;
d4e4baa9
AO
2976
2977 /* See if we already encountered this SAVE_EXPR. */
6be42dd4 2978 n = (tree *) pointer_map_contains (st, *tp);
d92b4486 2979
d4e4baa9
AO
2980 /* If we didn't already remap this SAVE_EXPR, do so now. */
2981 if (!n)
2982 {
5e20bdd7 2983 t = copy_node (*tp);
d4e4baa9 2984
d4e4baa9 2985 /* Remember this SAVE_EXPR. */
6be42dd4 2986 *pointer_map_insert (st, *tp) = t;
350ebd54 2987 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
6be42dd4 2988 *pointer_map_insert (st, t) = t;
d4e4baa9
AO
2989 }
2990 else
5e20bdd7
JZ
2991 {
2992 /* We've already walked into this SAVE_EXPR; don't do it again. */
2993 *walk_subtrees = 0;
6be42dd4 2994 t = *n;
5e20bdd7 2995 }
d4e4baa9
AO
2996
2997 /* Replace this SAVE_EXPR with the copy. */
5e20bdd7 2998 *tp = t;
d4e4baa9 2999}
d436bff8 3000
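/* A note on the double insertion above (an interpretation of the
   existing code): mapping both *TP -> T and T -> T means that if the
   walk later reaches the copy T itself, T maps to itself rather than
   being copied again, so each SAVE_EXPR stays evaluated only once in
   the copied body.  */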
aa4a53af
RK
3001/* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
3002 copies the declaration and enters it in the decl map in DATA (which is
1b369fae 3003 really a `copy_body_data *'). */
6de9cd9a
DN
3004
3005static tree
3006mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3007 void *data)
3008{
1b369fae 3009 copy_body_data *id = (copy_body_data *) data;
6de9cd9a
DN
3010
3011 /* Don't walk into types. */
350fae66
RK
3012 if (TYPE_P (*tp))
3013 *walk_subtrees = 0;
6de9cd9a 3014
350fae66 3015 else if (TREE_CODE (*tp) == LABEL_EXPR)
6de9cd9a 3016 {
350fae66 3017 tree decl = TREE_OPERAND (*tp, 0);
6de9cd9a 3018
350fae66 3019 /* Copy the decl and remember the copy. */
1b369fae 3020 insert_decl_map (id, decl, id->copy_decl (decl, id));
6de9cd9a
DN
3021 }
3022
3023 return NULL_TREE;
3024}
3025
19114537
EC
3026/* Perform any modifications to EXPR required when it is unsaved. Does
3027 not recurse into EXPR's subtrees. */
3028
3029static void
3030unsave_expr_1 (tree expr)
3031{
3032 switch (TREE_CODE (expr))
3033 {
3034 case TARGET_EXPR:
3035 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3036 It's OK for this to happen if it was part of a subtree that
3037 isn't immediately expanded, such as operand 2 of another
3038 TARGET_EXPR. */
3039 if (TREE_OPERAND (expr, 1))
3040 break;
3041
3042 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3043 TREE_OPERAND (expr, 3) = NULL_TREE;
3044 break;
3045
3046 default:
3047 break;
3048 }
3049}
3050
6de9cd9a
DN
3051/* Called via walk_tree when an expression is unsaved. Using the
3052 decl map pointed to by ST (which is really a `struct pointer_map_t *'),
3053 remaps all local declarations to appropriate replacements. */
d436bff8
AH
3054
3055static tree
6de9cd9a 3056unsave_r (tree *tp, int *walk_subtrees, void *data)
d436bff8 3057{
1b369fae 3058 copy_body_data *id = (copy_body_data *) data;
6be42dd4
RG
3059 struct pointer_map_t *st = id->decl_map;
3060 tree *n;
6de9cd9a
DN
3061
3062 /* Only a local declaration (variable or label). */
3063 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3064 || TREE_CODE (*tp) == LABEL_DECL)
3065 {
3066 /* Lookup the declaration. */
6be42dd4 3067 n = (tree *) pointer_map_contains (st, *tp);
9f63daea 3068
6de9cd9a
DN
3069 /* If it's there, remap it. */
3070 if (n)
6be42dd4 3071 *tp = *n;
6de9cd9a 3072 }
aa4a53af 3073
6de9cd9a
DN
3074 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3075 copy_statement_list (tp);
3076 else if (TREE_CODE (*tp) == BIND_EXPR)
3077 copy_bind_expr (tp, walk_subtrees, id);
3078 else if (TREE_CODE (*tp) == SAVE_EXPR)
82c82743 3079 remap_save_expr (tp, st, walk_subtrees);
d436bff8 3080 else
6de9cd9a
DN
3081 {
3082 copy_tree_r (tp, walk_subtrees, NULL);
3083
3084 /* Do whatever unsaving is required. */
3085 unsave_expr_1 (*tp);
3086 }
3087
3088 /* Keep iterating. */
3089 return NULL_TREE;
d436bff8
AH
3090}
3091
19114537
EC
3092/* Copies everything in EXPR and replaces variables, labels
3093 and SAVE_EXPRs local to EXPR. */
6de9cd9a
DN
3094
3095tree
19114537 3096unsave_expr_now (tree expr)
6de9cd9a 3097{
1b369fae 3098 copy_body_data id;
6de9cd9a
DN
3099
3100 /* There's nothing to do for NULL_TREE. */
3101 if (expr == 0)
3102 return expr;
3103
3104 /* Set up ID. */
3105 memset (&id, 0, sizeof (id));
1b369fae
RH
3106 id.src_fn = current_function_decl;
3107 id.dst_fn = current_function_decl;
6be42dd4 3108 id.decl_map = pointer_map_create ();
6de9cd9a 3109
1b369fae
RH
3110 id.copy_decl = copy_decl_no_change;
3111 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3112 id.transform_new_cfg = false;
3113 id.transform_return_to_modify = false;
3114 id.transform_lang_insert_block = false;
3115
6de9cd9a
DN
3116 /* Walk the tree once to find local labels. */
3117 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3118
3119 /* Walk the tree again, copying, remapping, and unsaving. */
3120 walk_tree (&expr, unsave_r, &id, NULL);
3121
3122 /* Clean up. */
6be42dd4 3123 pointer_map_destroy (id.decl_map);
6de9cd9a
DN
3124
3125 return expr;
3126}
3127
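/* A hedged usage sketch: a front end that needs to reuse an
   expression tree independently of its original occurrence would
   simply call the function above.  */
static tree ATTRIBUTE_UNUSED
example_unsave (tree expr)
{
  return unsave_expr_now (expr);
}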
3128/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
aa4a53af 3129
6de9cd9a
DN
3130static tree
3131debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3132{
3133 if (*tp == data)
3134 return (tree) data;
3135 else
3136 return NULL;
3137}
3138
6de9cd9a
DN
3139bool
3140debug_find_tree (tree top, tree search)
3141{
3142 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3143}
3144
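/* A hedged usage note: this helper is meant for interactive
   debugging, e.g. from gdb:

     (gdb) call debug_find_tree (top, search)

   which evaluates to true when SEARCH occurs somewhere inside TOP.  */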
e21aff8a 3145
6de9cd9a
DN
3146/* Declare the variables created by the inliner. Add all the variables in
3147 VARS to BIND_EXPR. */
3148
3149static void
e21aff8a 3150declare_inline_vars (tree block, tree vars)
6de9cd9a 3151{
84936f6f
RH
3152 tree t;
3153 for (t = vars; t; t = TREE_CHAIN (t))
9659ce8b
JH
3154 {
3155 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3156 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3157 cfun->unexpanded_var_list =
3158 tree_cons (NULL_TREE, t,
3159 cfun->unexpanded_var_list);
3160 }
6de9cd9a 3161
e21aff8a
SB
3162 if (block)
3163 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3164}
3165
19734dd8
RL
3166
3167/* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
1b369fae
RH
3168 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
3169 VAR_DECL translation. */
19734dd8 3170
1b369fae
RH
3171static tree
3172copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
19734dd8 3173{
19734dd8
RL
3174 /* Don't generate debug information for the copy if we wouldn't have
3175 generated it for the original either. */
3176 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3177 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3178
3179 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3180 declaration inspired this copy. */
3181 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3182
3183 /* The new variable/label has no RTL, yet. */
68a976f2
RL
3184 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3185 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
19734dd8
RL
3186 SET_DECL_RTL (copy, NULL_RTX);
3187
3188 /* These args would always appear unused, if not for this. */
3189 TREE_USED (copy) = 1;
3190
3191 /* Set the context for the new declaration. */
3192 if (!DECL_CONTEXT (decl))
3193 /* Globals stay global. */
3194 ;
1b369fae 3195 else if (DECL_CONTEXT (decl) != id->src_fn)
19734dd8
RL
3196 /* Things that weren't in the scope of the function we're inlining
3197 from aren't in the scope we're inlining to, either. */
3198 ;
3199 else if (TREE_STATIC (decl))
3200 /* Function-scoped static variables should stay in the original
3201 function. */
3202 ;
3203 else
3204 /* Ordinary automatic local variables are now in the scope of the
3205 new function. */
1b369fae 3206 DECL_CONTEXT (copy) = id->dst_fn;
19734dd8
RL
3207
3208 return copy;
3209}
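
/* A worked example of the context rules above: when inlining a call to
   G into F,
     - a file-scope object referenced by G has a null DECL_CONTEXT and
       keeps it;
     - a "static int n" declared inside G keeps DECL_CONTEXT == G, so
       every inlined copy refers to the same object; and
     - an ordinary automatic local of G gets DECL_CONTEXT == F in the
       copy.  */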

/* Make a copy of the PARM_DECL or RESULT_DECL DECL as a VAR_DECL,
   carrying over the qualifiers relevant to a variable.  */

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
  DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
      DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}
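
/* Concretely (illustrative): for a function returning an aggregate by
   invisible reference,

     struct S f (void);

   the RESULT_DECL has type "struct S *", while the return slot
   variable built above has the pointed-to type "struct S" -- hence the
   extra TREE_TYPE dereference when DECL_BY_REFERENCE is set.  */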

/* Copy DECL without changing its kind: the copy keeps DECL's TREE_CODE
   and differs only in the bookkeeping done by copy_decl_for_dup_finish.  */

static tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Copy DECL, dispatching on its kind: parameters and results become
   VAR_DECLs; everything else is copied unchanged.  */

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}
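
/* These helpers are meant to sit in the copy_decl callback slot of a
   copy_body_data, e.g. (a sketch of typical setup, mirroring
   unsave_expr_now and tree_function_versioning in this file):

     copy_body_data id;
     memset (&id, 0, sizeof (id));
     id.copy_decl = copy_decl_maybe_to_var;

   remap_decl then calls through id.copy_decl whenever it needs a fresh
   copy of a declaration.  */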

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data *id)
{
  tree *arg_copy, *parg;

  arg_copy = &orig_parm;
  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = remap_decl (*parg, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }
  return orig_parm;
}
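
/* The loop above walks with a pointer-to-pointer so that replacing the
   head of the chain needs no special case.  The same idiom, reduced to
   a standalone sketch (hypothetical types, not GCC code):

     struct node { struct node *next; };

     void
     replace_all (struct node **head, struct node *(*dup) (struct node *))
     {
       struct node **p;
       for (p = head; *p; p = &(*p)->next)
	 {
	   struct node *n = dup (*p);
	   n->next = (*p)->next;
	   *p = n;
	 }
     }  */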

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data *id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new);
      TREE_CHAIN (new) = TREE_CHAIN (*pvar);
      *pvar = new;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */
bool
tree_versionable_function_p (tree fndecl)
{
  if (fndecl == NULL_TREE)
    return false;
  /* ??? There are cases where a function is
     uninlinable but can be versioned.  */
  if (!tree_inlinable_function_p (fndecl))
    return false;

  return true;
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes of the original
   function and the new copied function respectively.  TREE_MAP, if
   non-null, describes DECL trees in the old function that are to be
   replaced with other trees while duplicating the body.  If
   UPDATE_CLONES is set, the call_stmt fields of edges of clones of
   the function will be updated.  */
void
tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
			  bool update_clones)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  tree t_step;
  tree old_current_function_decl = current_function_decl;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  if (!update_clones)
    {
      DECL_NAME (new_decl) = create_tmp_var_name (NULL);
      SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
      SET_DECL_RTL (new_decl, NULL_RTX);
      id.statements_to_fold = pointer_set_create ();
    }

  id.decl_map = pointer_map_create ();
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = false;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
		   old_entry_block->count,
		   old_entry_block->frequency);
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
			 &id);
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
      {
	replace_info = VARRAY_GENERIC_PTR (tree_map, i);
	if (replace_info->replace_p)
	  insert_decl_map (&id, replace_info->old_tree,
			   replace_info->new_tree);
      }

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.dst_fn);

  if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
	 t_step; t_step = TREE_CHAIN (t_step))
      {
	tree var = TREE_VALUE (t_step);
	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	  cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
						 cfun->unexpanded_var_list);
	else
	  cfun->unexpanded_var_list =
	    tree_cons (NULL_TREE, remap_decl (var, &id),
		       cfun->unexpanded_var_list);
      }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, old_entry_block->frequency,
	     ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  if (!update_clones)
    {
      fold_marked_statements (0, id.statements_to_fold);
      pointer_set_destroy (id.statements_to_fold);
      fold_cond_expr_cond ();
    }
  if (gimple_in_ssa_p (cfun))
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      if (!update_clones)
	delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
      if (!update_clones)
	{
	  fold_cond_expr_cond ();
	  if (need_ssa_update_p ())
	    update_ssa (TODO_update_ssa);
	}
    }
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
	      || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}
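
/* Sketch of a caller (hypothetical; the real callers are the IPA
   passes, e.g. interprocedural constant propagation): to emit a clone
   of FN_DECL in which a parameter is replaced by a constant, build a
   varray of struct ipa_replace_map entries, each with old_tree,
   new_tree and replace_p filled in, and hand it over as TREE_MAP:

     varray_type tree_map;
     VARRAY_GENERIC_PTR_INIT (tree_map, 1, "replace");
     VARRAY_PUSH_GENERIC_PTR (tree_map, replace_map);
     tree_function_versioning (fn_decl, clone_decl, tree_map, false);

   Passing UPDATE_CLONES as true instead updates the call_stmt fields
   on the edges of existing clones rather than minting a fresh,
   renamed version.  */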

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);

  return type;
}
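
/* Illustrative use (hypothetical caller): a pass that wants to mutate
   a type without affecting its other users can work on a private copy,

     tree copy = build_duplicate_type (orig);

   since the copy's fields are fresh nodes, changes to them leave ORIG
   untouched.  */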