/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

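/* For illustration only -- a hypothetical sketch, not real compiler
   output: given

       int inc (int x) { return x + 1; }

   inlining the call "y = inc (3);" conceptually yields

       x.1 = 3;             (PARM_DECL x remapped to VAR_DECL x.1)
       retval.2 = x.1 + 1;  (the RETURN_EXPR became a MODIFY_EXPR)
       y = retval.2;        (caller reads the returned-value variable)

   The names x.1 and retval.2 are made up for this example.  */
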
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

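/* A minimal usage sketch (hypothetical decls, for illustration): after

       insert_decl_map (id, old_decl, new_decl);

   a later lookup of OLD_DECL in id->decl_map yields NEW_DECL, and the
   identity entry makes a lookup of NEW_DECL yield NEW_DECL itself, so
   walking the copied body never duplicates the replacement again.  */
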
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from needing to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and also
             increases register pressure.

             We simply initialize all uninitialized vars to 0, except
             for the case we are inlining into the very first BB.  We
             could avoid this for all BBs that are not inside strongly
             connected regions of the CFG, but this is expensive to
             test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

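/* Illustrative sketch of the zero-initialization above (hypothetical
   GIMPLE): if the inlined body uses a local that has no defining
   statement, say "tmp_2" in "tmp_3 = tmp_2 + 1;", the copy may get an
   explicit "tmp.5 = 0;" appended to id->entry_bb so that the remapped
   SSA name has a real definition instead of a default definition whose
   lifetime could stretch across an abnormal edge.  */
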
e21aff8a 223/* Remap DECL during the copying of the BLOCK tree for the function. */
d4e4baa9 224
1b369fae
RH
225tree
226remap_decl (tree decl, copy_body_data *id)
d4e4baa9 227{
6be42dd4 228 tree *n;
e21aff8a
SB
229 tree fn;
230
231 /* We only remap local variables in the current function. */
1b369fae 232 fn = id->src_fn;
3c2a7a6a 233
e21aff8a
SB
234 /* See if we have remapped this declaration. */
235
6be42dd4 236 n = (tree *) pointer_map_contains (id->decl_map, decl);
e21aff8a
SB
237
238 /* If we didn't already have an equivalent for this declaration,
239 create one now. */
d4e4baa9
AO
240 if (!n)
241 {
d4e4baa9 242 /* Make a copy of the variable or label. */
1b369fae 243 tree t = id->copy_decl (decl, id);
19734dd8 244
596b98ce
AO
245 /* Remember it, so that if we encounter this local entity again
246 we can reuse this copy. Do this early because remap_type may
247 need this decl for TYPE_STUB_DECL. */
248 insert_decl_map (id, decl, t);
249
1b369fae
RH
250 if (!DECL_P (t))
251 return t;
252
3c2a7a6a
RH
253 /* Remap types, if necessary. */
254 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
255 if (TREE_CODE (t) == TYPE_DECL)
256 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
3c2a7a6a
RH
257
258 /* Remap sizes as necessary. */
726a989a
RB
259 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
260 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
d4e4baa9 261
8c27b7d4 262 /* If fields, do likewise for offset and qualifier. */
5377d5ba
RK
263 if (TREE_CODE (t) == FIELD_DECL)
264 {
726a989a 265 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
5377d5ba 266 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
726a989a 267 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
5377d5ba
RK
268 }
269
110cfe1c
JH
270 if (cfun && gimple_in_ssa_p (cfun)
271 && (TREE_CODE (t) == VAR_DECL
272 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
273 {
274 tree def = gimple_default_def (id->src_cfun, decl);
275 get_var_ann (t);
276 if (TREE_CODE (decl) != PARM_DECL && def)
277 {
278 tree map = remap_ssa_name (def, id);
279 /* Watch out RESULT_DECLs whose SSA names map directly
280 to them. */
045685a9 281 if (TREE_CODE (map) == SSA_NAME
726a989a 282 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
110cfe1c
JH
283 set_default_def (t, map);
284 }
285 add_referenced_var (t);
286 }
5e20bdd7 287 return t;
d4e4baa9
AO
288 }
289
f82a627c
EB
290 if (id->do_not_unshare)
291 return *n;
292 else
293 return unshare_expr (*n);
d4e4baa9
AO
294}
295
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

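/* An example of the "variably modified" test above (a sketch): for a
   local declared as "char buf[n]" where N is a parameter of the source
   function, the array type's TYPE_DOMAIN refers to N and therefore the
   type must be copied and remapped along with N itself; a type such as
   plain "int" is not variably modified and is entered into the map as
   its own identity.  */
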
/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
        copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}


/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
         any other type.  This confuses the middle-end type verification.
         As cloned bodies do not go through gimplification again the fixup
         there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
          && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
        new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

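/* Illustration of the *& elimination handled above (hypothetical
   source): when

       void set (int *p) { *p = 1; }

   is inlined at the call "set (&x);", the use of P is replaced by the
   ADDR_EXPR "&x", so the copied body momentarily contains "*&x = 1";
   folding the INDIRECT_REF of the ADDR_EXPR turns this back into the
   plain "x = 1".  */
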
/* Called from copy_tree_body via walk_tree.  DATA is really a
   'copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
           || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              if (id->do_not_unshare)
                new_tree = *n;
              else
                new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
                                                 type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars.  If not referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the block
         of the function call.  */
      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst (NULL_TREE,
                         id->eh_region_offset
                         + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  /* We have to handle EH region remapping of GIMPLE_RESX specially because
     the region number is not an operand.  */
  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
    {
      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
    }
  return copy;
}

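/* Sketch of the GIMPLE_RETURN rewrite above (hypothetical GIMPLE):
   with id->retvar being, say, the variable D.1234 in the caller, the
   inlinee statement "return x_1;" is remapped to the assignment
   "D.1234 = x_1;", the branch back to the caller being represented by
   CFG edges instead; a plain "return;" simply becomes a GIMPLE_NOP.  */
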
e21aff8a
SB
1338/* Copy basic block, scale profile accordingly. Edges will be taken care of
1339 later */
1340
1341static basic_block
0178d644
VR
1342copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1343 gcov_type count_scale)
e21aff8a 1344{
c2a4718a 1345 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
e21aff8a 1346 basic_block copy_basic_block;
726a989a 1347 tree decl;
e21aff8a
SB
1348
1349 /* create_basic_block() will append every new block to
1350 basic_block_info automatically. */
cceb1885
GDR
1351 copy_basic_block = create_basic_block (NULL, (void *) 0,
1352 (basic_block) bb->prev_bb->aux);
e21aff8a 1353 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
45a80bb9 1354
726a989a
RB
1355 /* We are going to rebuild frequencies from scratch. These values
1356 have just small importance to drive canonicalize_loop_headers. */
45a80bb9 1357 copy_basic_block->frequency = ((gcov_type)bb->frequency
726a989a
RB
1358 * frequency_scale / REG_BR_PROB_BASE);
1359
45a80bb9
JH
1360 if (copy_basic_block->frequency > BB_FREQ_MAX)
1361 copy_basic_block->frequency = BB_FREQ_MAX;
e21aff8a 1362
726a989a
RB
1363 copy_gsi = gsi_start_bb (copy_basic_block);
1364
1365 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e21aff8a 1366 {
726a989a
RB
1367 gimple stmt = gsi_stmt (gsi);
1368 gimple orig_stmt = stmt;
e21aff8a 1369
416c991f 1370 id->regimplify = false;
726a989a
RB
1371 stmt = remap_gimple_stmt (stmt, id);
1372 if (gimple_nop_p (stmt))
1373 continue;
1374
1375 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
c2a4718a 1376 seq_gsi = copy_gsi;
726a989a
RB
1377
1378 /* With return slot optimization we can end up with
1379 non-gimple (foo *)&this->m, fix that here. */
4c29307d
JJ
1380 if (is_gimple_assign (stmt)
1381 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1382 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
e21aff8a 1383 {
726a989a 1384 tree new_rhs;
c2a4718a 1385 new_rhs = force_gimple_operand_gsi (&seq_gsi,
4a2b7f24
JJ
1386 gimple_assign_rhs1 (stmt),
1387 true, NULL, false, GSI_NEW_STMT);
726a989a 1388 gimple_assign_set_rhs1 (stmt, new_rhs);
c2a4718a 1389 id->regimplify = false;
726a989a 1390 }
2b65dae5 1391
c2a4718a
JJ
1392 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1393
1394 if (id->regimplify)
1395 gimple_regimplify_operands (stmt, &seq_gsi);
1396
1397 /* If copy_basic_block has been empty at the start of this iteration,
1398 call gsi_start_bb again to get at the newly added statements. */
1399 if (gsi_end_p (copy_gsi))
1400 copy_gsi = gsi_start_bb (copy_basic_block);
1401 else
1402 gsi_next (&copy_gsi);
110cfe1c 1403
726a989a
RB
1404 /* Process the new statement. The call to gimple_regimplify_operands
1405 possibly turned the statement into multiple statements, we
1406 need to process all of them. */
c2a4718a 1407 do
726a989a 1408 {
9187e02d
JH
1409 tree fn;
1410
c2a4718a 1411 stmt = gsi_stmt (copy_gsi);
726a989a
RB
1412 if (is_gimple_call (stmt)
1413 && gimple_call_va_arg_pack_p (stmt)
1414 && id->gimple_call)
1415 {
1416 /* __builtin_va_arg_pack () should be replaced by
1417 all arguments corresponding to ... in the caller. */
1418 tree p;
1419 gimple new_call;
1420 VEC(tree, heap) *argarray;
1421 size_t nargs = gimple_call_num_args (id->gimple_call);
1422 size_t n;
1423
1424 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1425 nargs--;
1426
1427 /* Create the new array of arguments. */
1428 n = nargs + gimple_call_num_args (stmt);
1429 argarray = VEC_alloc (tree, heap, n);
1430 VEC_safe_grow (tree, heap, argarray, n);
1431
1432 /* Copy all the arguments before '...' */
1433 memcpy (VEC_address (tree, argarray),
1434 gimple_call_arg_ptr (stmt, 0),
1435 gimple_call_num_args (stmt) * sizeof (tree));
1436
1437 /* Append the arguments passed in '...' */
1438 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1439 gimple_call_arg_ptr (id->gimple_call, 0)
1440 + (gimple_call_num_args (id->gimple_call) - nargs),
1441 nargs * sizeof (tree));
1442
1443 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1444 argarray);
1445
1446 VEC_free (tree, heap, argarray);
1447
1448 /* Copy all GIMPLE_CALL flags, location and block, except
1449 GF_CALL_VA_ARG_PACK. */
1450 gimple_call_copy_flags (new_call, stmt);
1451 gimple_call_set_va_arg_pack (new_call, false);
1452 gimple_set_location (new_call, gimple_location (stmt));
1453 gimple_set_block (new_call, gimple_block (stmt));
1454 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1455
1456 gsi_replace (&copy_gsi, new_call, false);
9cfa22be 1457 gimple_set_bb (stmt, NULL);
726a989a
RB
1458 stmt = new_call;
1459 }
1460 else if (is_gimple_call (stmt)
1461 && id->gimple_call
1462 && (decl = gimple_call_fndecl (stmt))
1463 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1464 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
e0704a46 1465 {
726a989a
RB
1466 /* __builtin_va_arg_pack_len () should be replaced by
1467 the number of anonymous arguments. */
1468 size_t nargs = gimple_call_num_args (id->gimple_call);
1469 tree count, p;
1470 gimple new_stmt;
1471
1472 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1473 nargs--;
1474
1475 count = build_int_cst (integer_type_node, nargs);
1476 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1477 gsi_replace (&copy_gsi, new_stmt, false);
1478 stmt = new_stmt;
1479 }
b8a00a4d 1480
726a989a
RB
1481 /* Statements produced by inlining can be unfolded, especially
1482 when we constant propagated some operands. We can't fold
1483 them right now for two reasons:
1484 1) folding require SSA_NAME_DEF_STMTs to be correct
1485 2) we can't change function calls to builtins.
1486 So we just mark statement for later folding. We mark
1487 all new statements, instead just statements that has changed
1488 by some nontrivial substitution so even statements made
1489 foldable indirectly are updated. If this turns out to be
1490 expensive, copy_body can be told to watch for nontrivial
1491 changes. */
1492 if (id->statements_to_fold)
1493 pointer_set_insert (id->statements_to_fold, stmt);
1494
1495 /* We're duplicating a CALL_EXPR. Find any corresponding
1496 callgraph edges and update or duplicate them. */
1497 if (is_gimple_call (stmt))
1498 {
9187e02d 1499 struct cgraph_edge *edge = cgraph_edge (id->src_node, orig_stmt);
f618d33e 1500 int flags;
6ef5231b 1501
726a989a 1502 switch (id->transform_call_graph_edges)
e0704a46 1503 {
726a989a 1504 case CB_CGE_DUPLICATE:
9187e02d 1505 if (edge)
726a989a 1506 cgraph_clone_edge (edge, id->dst_node, stmt,
3e293154
MJ
1507 REG_BR_PROB_BASE, 1,
1508 edge->frequency, true);
726a989a
RB
1509 break;
1510
1511 case CB_CGE_MOVE_CLONES:
9187e02d
JH
1512 cgraph_set_call_stmt_including_clones (id->dst_node, orig_stmt, stmt);
1513 break;
110cfe1c 1514
726a989a 1515 case CB_CGE_MOVE:
9187e02d 1516 if (edge)
726a989a
RB
1517 cgraph_set_call_stmt (edge, stmt);
1518 break;
110cfe1c 1519
726a989a
RB
1520 default:
1521 gcc_unreachable ();
110cfe1c 1522 }
f618d33e 1523
6ce2002b 1524 edge = cgraph_edge (id->src_node, orig_stmt);
9187e02d
JH
1525 /* Constant propagation on argument done during inlining
1526 may create new direct call. Produce an edge for it. */
6ce2002b
JH
1527 if ((!edge
1528 || (edge->indirect_call
1529 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1530 && is_gimple_call (stmt)
1531 && (fn = gimple_call_fndecl (stmt)) != NULL)
9187e02d
JH
1532 {
1533 struct cgraph_node *dest = cgraph_node (fn);
1534
1535 /* We have missing edge in the callgraph. This can happen in one case
1536 where previous inlining turned indirect call into direct call by
1537 constant propagating arguments. In all other cases we hit a bug
1538 (incorrect node sharing is most common reason for missing edges. */
1539 gcc_assert (dest->needed || !dest->analyzed);
1540 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1541 cgraph_create_edge_including_clones (id->dst_node, dest, stmt,
4b685e14
JH
1542 bb->count,
1543 compute_call_stmt_bb_frequency (id->dst_node->decl, bb),
9187e02d
JH
1544 bb->loop_depth,
1545 CIF_ORIGINALLY_INDIRECT_CALL);
1546 else
1547 cgraph_create_edge (id->dst_node, dest, stmt,
1548 bb->count, CGRAPH_FREQ_BASE,
1549 bb->loop_depth)->inline_failed
1550 = CIF_ORIGINALLY_INDIRECT_CALL;
1551 if (dump_file)
1552 {
1553 fprintf (dump_file, "Created new direct edge to %s",
1554 cgraph_node_name (dest));
1555 }
1556 }
1557
f618d33e
MJ
1558 flags = gimple_call_flags (stmt);
1559
1560 if (flags & ECF_MAY_BE_ALLOCA)
1561 cfun->calls_alloca = true;
1562 if (flags & ECF_RETURNS_TWICE)
1563 cfun->calls_setjmp = true;
726a989a 1564 }
e21aff8a 1565
726a989a
RB
1566 /* If you think we can abort here, you are wrong.
1567 There is no region 0 in gimple. */
1568 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);
1569
1570 if (stmt_could_throw_p (stmt)
1571 /* When we are cloning for inlining, we are supposed to
1572 construct a clone that calls precisely the same functions
1573 as original. However IPA optimizers might've proved
1574 earlier some function calls as non-trapping that might
1575 render some basic blocks dead that might become
1576 unreachable.
1577
1578 We can't update SSA with unreachable blocks in the CFG and thus
1579 we prevent the scenario by preserving even the "dead" eh
1580 edges until the point they are later removed by the
1581 fixup_cfg pass. */
1582 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
1583 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
1584 {
1585 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
1586
1587 /* Add an entry for the copied tree in the EH hashtable.
1588 When cloning or versioning, use the hashtable in
1589 cfun, and just copy the EH number. When inlining, use the
1590 hashtable in the caller, and adjust the region number. */
1591 if (region > 0)
1592 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
1593
1594 /* If this tree doesn't have a region associated with it,
1595 and there is a "current region,"
1596 then associate this tree with the current region
1597 and add edges associated with this region. */
1598 if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
1599 && id->eh_region > 0
1600 && stmt_could_throw_p (stmt))
1601 add_stmt_to_eh_region (stmt, id->eh_region);
e21aff8a 1602 }
726a989a 1603
1604 if (gimple_in_ssa_p (cfun))
1605 {
1606 ssa_op_iter i;
1607 tree def;
1608
1609 find_new_referenced_vars (gsi_stmt (copy_gsi));
1610 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1611 if (TREE_CODE (def) == SSA_NAME)
1612 SSA_NAME_DEF_STMT (def) = stmt;
1613 }
1614
1615 gsi_next (&copy_gsi);
e21aff8a 1616 }
c2a4718a 1617 while (!gsi_end_p (copy_gsi));
726a989a 1618
1619 copy_gsi = gsi_last_bb (copy_basic_block);
e21aff8a 1620 }
726a989a 1621
e21aff8a 1622 return copy_basic_block;
1623}
1624
110cfe1c 1625/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1626 form is quite easy, since the dominator relationship for old basic blocks does
1627 not change.
1628
1629 There is however an exception where inlining might change the dominator
1630 relation across EH edges from basic blocks within the inlined function
5305a4cb 1631 to landing pads in the function we inline into.
110cfe1c 1632
e9705dc5 1633 The function fills in PHI_RESULTs of such PHI nodes if they refer
1634 to gimple regs. Otherwise, the function marks PHI_RESULT of such
1635 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1636 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1637 set, and this means that there will be no overlapping live ranges
110cfe1c 1638 for the underlying symbol.
1639
1640 This might change in the future if we allow redirecting of EH edges;
1641 we might then want to change the way we build the CFG pre-inlining
1642 to include all the possible edges. */
1643static void
e9705dc5 1644update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1645 bool can_throw, bool nonlocal_goto)
110cfe1c 1646{
1647 edge e;
1648 edge_iterator ei;
1649
1650 FOR_EACH_EDGE (e, ei, bb->succs)
1651 if (!e->dest->aux
1652 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1653 {
726a989a 1654 gimple phi;
1655 gimple_stmt_iterator si;
110cfe1c 1656
e9705dc5 1657 if (!nonlocal_goto)
1658 gcc_assert (e->flags & EDGE_EH);
726a989a 1659
e9705dc5 1660 if (!can_throw)
1661 gcc_assert (!(e->flags & EDGE_EH));
726a989a 1662
1663 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
110cfe1c 1664 {
e9705dc5 1665 edge re;
1666
726a989a 1667 phi = gsi_stmt (si);
1668
e9705dc5 1669 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1670 gcc_assert (!e->dest->aux);
1671
496a4ef5 1672 gcc_assert ((e->flags & EDGE_EH)
1673 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
e9705dc5 1674
1675 if (!is_gimple_reg (PHI_RESULT (phi)))
1676 {
726a989a 1677 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
e9705dc5 1678 continue;
1679 }
1680
1681 re = find_edge (ret_bb, e->dest);
1432b19f 1682 gcc_assert (re);
e9705dc5 1683 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1684 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1685
1686 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1687 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
110cfe1c 1688 }
1689 }
1690}
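/* Editorial note (not in the original source): the SET_USE loop above
   seeds each PHI argument on the new abnormal/EH edge E with the value
   already supplied on the matching edge RE from RET_BB, so the landing
   pad keeps seeing the value it expected before the copy was made. */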
1691
726a989a 1692
128a79fb 1693/* Copy edges from BB into its copy constructed earlier, scale profile
1694 accordingly. Edges will be taken care of later. Assume the aux
1695 pointers point to the copies of each BB. */
726a989a 1696
e21aff8a 1697static void
0178d644 1698copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
e21aff8a 1699{
cceb1885 1700 basic_block new_bb = (basic_block) bb->aux;
e21aff8a 1701 edge_iterator ei;
1702 edge old_edge;
726a989a 1703 gimple_stmt_iterator si;
e21aff8a 1704 int flags;
1705
1706 /* Use the indices from the original blocks to create edges for the
1707 new ones. */
1708 FOR_EACH_EDGE (old_edge, ei, bb->succs)
e0704a46 1709 if (!(old_edge->flags & EDGE_EH))
1710 {
82d6e6fc 1711 edge new_edge;
e21aff8a 1712
e0704a46 1713 flags = old_edge->flags;
e21aff8a 1714
e0704a46 1715 /* Return edges do get a FALLTHRU flag when they get inlined. */
1716 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1717 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1718 flags |= EDGE_FALLTHRU;
82d6e6fc 1719 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1720 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1721 new_edge->probability = old_edge->probability;
e0704a46 1722 }
e21aff8a 1723
1724 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1725 return;
1726
726a989a 1727 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
e21aff8a 1728 {
726a989a 1729 gimple copy_stmt;
e9705dc5 1730 bool can_throw, nonlocal_goto;
e21aff8a 1731
726a989a 1732 copy_stmt = gsi_stmt (si);
e21aff8a 1733 update_stmt (copy_stmt);
110cfe1c 1734 if (gimple_in_ssa_p (cfun))
1735 mark_symbols_for_renaming (copy_stmt);
726a989a 1736
e21aff8a 1737 /* Do this before the possible split_block. */
726a989a 1738 gsi_next (&si);
e21aff8a 1739
1740 /* If this tree could throw an exception, there are two
1741 cases where we need to add abnormal edge(s): the
1742 tree wasn't in a region and there is a "current
1743 region" in the caller; or the original tree had
1744 EH edges. In both cases split the block after the tree,
1745 and add abnormal edge(s) as needed; we need both
1746 those from the callee and the caller.
1747 We check whether the copy can throw, because the const
1748 propagation can change an INDIRECT_REF which throws
1749 into a COMPONENT_REF which doesn't. If the copy
1750 can throw, the original could also throw. */
726a989a 1751 can_throw = stmt_can_throw_internal (copy_stmt);
1752 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
e9705dc5 1753
1754 if (can_throw || nonlocal_goto)
e21aff8a 1755 {
726a989a 1756 if (!gsi_end_p (si))
e21aff8a 1757 /* Note that bb's predecessor edges aren't necessarily
1758 right at this point; split_block doesn't care. */
1759 {
1760 edge e = split_block (new_bb, copy_stmt);
110cfe1c 1761
e21aff8a 1762 new_bb = e->dest;
110cfe1c 1763 new_bb->aux = e->src->aux;
726a989a 1764 si = gsi_start_bb (new_bb);
e21aff8a 1765 }
e9705dc5 1766 }
e21aff8a 1767
e9705dc5 1768 if (can_throw)
1769 make_eh_edges (copy_stmt);
110cfe1c 1770
e9705dc5 1771 if (nonlocal_goto)
726a989a 1772 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
e9705dc5 1773
1774 if ((can_throw || nonlocal_goto)
1775 && gimple_in_ssa_p (cfun))
726a989a 1776 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
e9705dc5 1777 can_throw, nonlocal_goto);
110cfe1c 1778 }
1779}
1780
1781/* Copy the PHIs. All blocks and edges are copied, some blocks
1782 were possibly split and new outgoing EH edges inserted.
1783 BB points to the block of the original function and AUX pointers
1784 link the original and newly copied blocks. */
1785
1786static void
1787copy_phis_for_bb (basic_block bb, copy_body_data *id)
1788{
3d9a9f94 1789 basic_block const new_bb = (basic_block) bb->aux;
110cfe1c 1790 edge_iterator ei;
726a989a 1791 gimple phi;
1792 gimple_stmt_iterator si;
110cfe1c 1793
726a989a 1794 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
110cfe1c 1795 {
726a989a 1796 tree res, new_res;
1797 gimple new_phi;
110cfe1c 1798 edge new_edge;
1799
726a989a 1800 phi = gsi_stmt (si);
1801 res = PHI_RESULT (phi);
1802 new_res = res;
110cfe1c 1803 if (is_gimple_reg (res))
1804 {
726a989a 1805 walk_tree (&new_res, copy_tree_body_r, id, NULL);
110cfe1c 1806 SSA_NAME_DEF_STMT (new_res)
1807 = new_phi = create_phi_node (new_res, new_bb);
1808 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1809 {
726a989a 1810 edge const old_edge
1811 = find_edge ((basic_block) new_edge->src->aux, bb);
110cfe1c 1812 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1813 tree new_arg = arg;
726a989a 1814 tree block = id->block;
1815 id->block = NULL_TREE;
1816 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1817 id->block = block;
110cfe1c 1818 gcc_assert (new_arg);
36b6e793 1819 /* With return slot optimization we can end up with
1820 non-gimple (foo *)&this->m, fix that here. */
1821 if (TREE_CODE (new_arg) != SSA_NAME
1822 && TREE_CODE (new_arg) != FUNCTION_DECL
1823 && !is_gimple_val (new_arg))
1824 {
726a989a 1825 gimple_seq stmts = NULL;
1826 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1827 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
36b6e793 1828 }
110cfe1c 1829 add_phi_arg (new_phi, new_arg, new_edge);
1830 }
e21aff8a 1831 }
1832 }
1833}
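/* Editorial note (not in the original source): the force_gimple_operand
   path above handles PHI arguments such as the (foo *)&this->m case
   produced by the return slot optimization; the statements it builds
   are committed on the new edge immediately, which may split that edge. */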
1834
726a989a 1835
e21aff8a 1836/* Wrapper for remap_decl so it can be used as a callback. */
726a989a 1837
e21aff8a 1838static tree
1839remap_decl_1 (tree decl, void *data)
1840{
1b369fae 1841 return remap_decl (decl, (copy_body_data *) data);
e21aff8a 1842}
1843
110cfe1c 1844/* Build struct function and associated data structures for the new clone
1845 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1846
1847static void
1848initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1849 int frequency)
1850{
110cfe1c 1851 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
0178d644 1852 gcov_type count_scale, frequency_scale;
110cfe1c 1853
1854 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1855 count_scale = (REG_BR_PROB_BASE * count
1856 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1857 else
1858 count_scale = 1;
1859
1860 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1861 frequency_scale = (REG_BR_PROB_BASE * frequency
1862 /
1863 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1864 else
1865 frequency_scale = count_scale;
1866
1867 /* Register specific tree functions. */
726a989a 1868 gimple_register_cfg_hooks ();
39ecc018 1869
1870 /* Get clean struct function. */
1871 push_struct_function (new_fndecl);
1872
1873 /* We will rebuild these, so just sanity check that they are empty. */
1874 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1875 gcc_assert (cfun->local_decls == NULL);
1876 gcc_assert (cfun->cfg == NULL);
1877 gcc_assert (cfun->decl == new_fndecl);
1878
39ecc018 1879 /* Copy items we preserve during cloning. */
1880 cfun->static_chain_decl = src_cfun->static_chain_decl;
1881 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1882 cfun->function_end_locus = src_cfun->function_end_locus;
1883 cfun->curr_properties = src_cfun->curr_properties;
1884 cfun->last_verified = src_cfun->last_verified;
1885 if (src_cfun->ipa_transforms_to_apply)
1886 cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
1887 src_cfun->ipa_transforms_to_apply);
1888 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
1889 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1890 cfun->function_frequency = src_cfun->function_frequency;
1891 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
1892 cfun->stdarg = src_cfun->stdarg;
1893 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
1894 cfun->after_inlining = src_cfun->after_inlining;
1895 cfun->returns_struct = src_cfun->returns_struct;
1896 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
1897 cfun->after_tree_profile = src_cfun->after_tree_profile;
1898
110cfe1c 1899 init_empty_tree_cfg ();
1900
1901 ENTRY_BLOCK_PTR->count =
1902 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1903 REG_BR_PROB_BASE);
1904 ENTRY_BLOCK_PTR->frequency =
1905 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1906 frequency_scale / REG_BR_PROB_BASE);
1907 EXIT_BLOCK_PTR->count =
1908 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1909 REG_BR_PROB_BASE);
1910 EXIT_BLOCK_PTR->frequency =
1911 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1912 frequency_scale / REG_BR_PROB_BASE);
1913 if (src_cfun->eh)
1914 init_eh_for_function ();
1915
1916 if (src_cfun->gimple_df)
1917 {
5db9ba0c 1918 init_tree_ssa (cfun);
110cfe1c 1919 cfun->gimple_df->in_ssa_p = true;
1920 init_ssa_operands ();
1921 }
1922 pop_cfun ();
1923}
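/* Worked example for the scaling above (illustrative figures): if the
   source function's entry count is 1000 and the clone is built with
   COUNT == 250, then count_scale == REG_BR_PROB_BASE * 250 / 1000, a
   fixed-point ratio of 1/4, and the clone's ENTRY_BLOCK count becomes
   1000 * count_scale / REG_BR_PROB_BASE == 250. */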
1924
e21aff8a 1925/* Make a copy of the body of FN so that it can be inserted inline in
1926 another function. Walks FN via CFG, returns new fndecl. */
1927
1928static tree
1b369fae 1929copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
e21aff8a 1930 basic_block entry_block_map, basic_block exit_block_map)
1931{
1b369fae 1932 tree callee_fndecl = id->src_fn;
e21aff8a 1933 /* Original cfun for the callee, doesn't change. */
1b369fae 1934 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
110cfe1c 1935 struct function *cfun_to_copy;
e21aff8a 1936 basic_block bb;
1937 tree new_fndecl = NULL;
0178d644 1938 gcov_type count_scale, frequency_scale;
110cfe1c 1939 int last;
e21aff8a 1940
1b369fae 1941 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
e21aff8a 1942 count_scale = (REG_BR_PROB_BASE * count
1b369fae 1943 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
e21aff8a 1944 else
1945 count_scale = 1;
1946
1b369fae 1947 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
e21aff8a 1948 frequency_scale = (REG_BR_PROB_BASE * frequency
1949 /
1b369fae 1950 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
e21aff8a 1951 else
1952 frequency_scale = count_scale;
1953
1954 /* Register specific tree functions. */
726a989a 1955 gimple_register_cfg_hooks ();
e21aff8a 1956
1957 /* Must have a CFG here at this point. */
1958 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1959 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1960
110cfe1c 1961 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1962
e21aff8a 1963 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1964 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
110cfe1c 1965 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1966 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
e21aff8a 1967
e21aff8a 1968 /* Duplicate any exception-handling regions. */
1969 if (cfun->eh)
1970 {
1b369fae 1971 id->eh_region_offset
fad41cd7 1972 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1973 0, id->eh_region);
e21aff8a 1974 }
726a989a 1975
e21aff8a 1976 /* Use the aux pointers to map the original blocks to their copies. */
1977 FOR_EACH_BB_FN (bb, cfun_to_copy)
110cfe1c 1978 {
82d6e6fc 1979 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
1980 bb->aux = new_bb;
1981 new_bb->aux = bb;
110cfe1c 1982 }
1983
7c57be85 1984 last = last_basic_block;
726a989a 1985
e21aff8a 1986 /* Now that we've duplicated the blocks, duplicate their edges. */
1987 FOR_ALL_BB_FN (bb, cfun_to_copy)
e9705dc5 1988 copy_edges_for_bb (bb, count_scale, exit_block_map);
726a989a 1989
110cfe1c 1990 if (gimple_in_ssa_p (cfun))
1991 FOR_ALL_BB_FN (bb, cfun_to_copy)
1992 copy_phis_for_bb (bb, id);
726a989a 1993
e21aff8a 1994 FOR_ALL_BB_FN (bb, cfun_to_copy)
110cfe1c 1995 {
1996 ((basic_block)bb->aux)->aux = NULL;
1997 bb->aux = NULL;
1998 }
726a989a 1999
110cfe1c 2000 /* Zero out AUX fields of newly created blocks during EH edge
2001 insertion. */
7c57be85 2002 for (; last < last_basic_block; last++)
110cfe1c 2003 BASIC_BLOCK (last)->aux = NULL;
2004 entry_block_map->aux = NULL;
2005 exit_block_map->aux = NULL;
e21aff8a 2006
2007 return new_fndecl;
2008}
2009
f82a627c 2010/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2011 another function. */
2012
2013static tree
2014copy_tree_body (copy_body_data *id)
2015{
2016 tree fndecl = id->src_fn;
2017 tree body = DECL_SAVED_TREE (fndecl);
2018
2019 walk_tree (&body, copy_tree_body_r, id, NULL);
2020
2021 return body;
2022}
2023
e21aff8a 2024static tree
1b369fae 2025copy_body (copy_body_data *id, gcov_type count, int frequency,
e21aff8a 2026 basic_block entry_block_map, basic_block exit_block_map)
2027{
1b369fae 2028 tree fndecl = id->src_fn;
e21aff8a 2029 tree body;
2030
2031 /* If this body has a CFG, walk CFG and copy. */
2032 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2033 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
2034
2035 return body;
2036}
2037
04482133 2038/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2039 defined in function FN, or of a data member thereof. */
2040
2041static bool
2042self_inlining_addr_expr (tree value, tree fn)
2043{
2044 tree var;
2045
2046 if (TREE_CODE (value) != ADDR_EXPR)
2047 return false;
2048
2049 var = get_base_address (TREE_OPERAND (value, 0));
e21aff8a 2050
50886bf1 2051 return var && auto_var_in_fn_p (var, fn);
04482133 2052}
2053
6de9cd9a 2054static void
0f1961a2 2055insert_init_stmt (basic_block bb, gimple init_stmt)
2056{
0f1961a2 2057 /* If VAR represents a zero-sized variable, it's possible that the
2058 assignment statement may result in no gimple statements. */
2059 if (init_stmt)
c2a4718a 2060 {
2061 gimple_stmt_iterator si = gsi_last_bb (bb);
0f1961a2 2062
bfb0b886 2063 /* We can end up with init statements that store to a non-register
2064 from a rhs with a conversion. Handle that here by forcing the
2065 rhs into a temporary. gimple_regimplify_operands is not
2066 prepared to do this for us. */
2067 if (!is_gimple_reg (gimple_assign_lhs (init_stmt))
2068 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2069 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2070 {
2071 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2072 gimple_expr_type (init_stmt),
2073 gimple_assign_rhs1 (init_stmt));
2074 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2075 GSI_NEW_STMT);
2076 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2077 gimple_assign_set_rhs1 (init_stmt, rhs);
2078 }
c2a4718a 2079 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2080 gimple_regimplify_operands (init_stmt, &si);
2081 mark_symbols_for_renaming (init_stmt);
2082 }
0f1961a2 2083}
2084
2085/* Initialize parameter P with VALUE. If needed, produce an init statement
2086 at the end of BB. When BB is NULL, we return the init statement to be
2087 output later. */
2088static gimple
1b369fae 2089setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
e21aff8a 2090 basic_block bb, tree *vars)
6de9cd9a 2091{
0f1961a2 2092 gimple init_stmt = NULL;
6de9cd9a 2093 tree var;
f4088621 2094 tree rhs = value;
110cfe1c 2095 tree def = (gimple_in_ssa_p (cfun)
2096 ? gimple_default_def (id->src_cfun, p) : NULL);
6de9cd9a 2097
f4088621 2098 if (value
2099 && value != error_mark_node
2100 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
c54e3854 2101 {
2102 if (fold_convertible_p (TREE_TYPE (p), value))
2103 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2104 else
2105 /* ??? For valid (GIMPLE) programs we should not end up here.
2106 Still if something has gone wrong and we end up with truly
2107 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2108 to not leak invalid GIMPLE to the following passes. */
2109 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2110 }
f4088621 2111
110cfe1c 2112 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2113 we may not need to create a new variable here at all. Instead, we may
2114 be able to just use the argument value. */
6de9cd9a 2115 if (TREE_READONLY (p)
2116 && !TREE_ADDRESSABLE (p)
110cfe1c 2117 && value && !TREE_SIDE_EFFECTS (value)
2118 && !def)
6de9cd9a 2119 {
84936f6f 2120 /* We may produce non-gimple trees by adding NOPs or introduce
2121 invalid sharing when the operand is not really constant.
2122 It is not a big deal to prohibit constant propagation here, as
2123 we will constant propagate in the DOM1 pass anyway. */
2124 if (is_gimple_min_invariant (value)
f4088621 2125 && useless_type_conversion_p (TREE_TYPE (p),
2126 TREE_TYPE (value))
04482133 2127 /* We have to be very careful about ADDR_EXPR. Make sure
2128 the base variable isn't a local variable of the inlined
2129 function, e.g., when doing recursive inlining, direct or
2130 mutually-recursive or whatever, which is why we don't
2131 just test whether fn == current_function_decl. */
2132 && ! self_inlining_addr_expr (value, fn))
6de9cd9a 2133 {
6de9cd9a 2134 insert_decl_map (id, p, value);
0f1961a2 2135 return NULL;
6de9cd9a 2136 }
2137 }
2138
5377d5ba 2139 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2140 here since the type of this decl must be visible to the calling
8c27b7d4 2141 function. */
1b369fae 2142 var = copy_decl_to_var (p, id);
110cfe1c 2143 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2144 {
2145 get_var_ann (var);
2146 add_referenced_var (var);
2147 }
e21aff8a 2148
6de9cd9a 2149 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2150 that way, when the PARM_DECL is encountered, it will be
2151 automatically replaced by the VAR_DECL. */
7c7d3047 2152 insert_decl_map (id, p, var);
6de9cd9a 2153
2154 /* Declare this new variable. */
2155 TREE_CHAIN (var) = *vars;
2156 *vars = var;
2157
2158 /* Make gimplifier happy about this variable. */
84936f6f 2159 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
6de9cd9a 2160
2161 /* Even if P was TREE_READONLY, the new VAR should not be.
2162 In the original code, we would have constructed a
2163 temporary, and then the function body would have never
2164 changed the value of P. However, now, we will be
2165 constructing VAR directly. The constructor body may
2166 change its value multiple times as it is being
2167 constructed. Therefore, it must not be TREE_READONLY;
2168 the back-end assumes that TREE_READONLY variable is
2169 assigned to only once. */
2170 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2171 TREE_READONLY (var) = 0;
2172
110cfe1c 2173 /* If there is no setup required and we are in SSA, take the easy route
2174 replacing all SSA names representing the function parameter by the
2175 SSA name passed to the function.
2176
2177 We need to construct a map for the variable anyway, as it might be
2178 used in different SSA names when the parameter is set in the function.
2179
8454d27e 2180 Do the replacement at -O0 for const arguments replaced by a constant.
2181 This is important for builtin_constant_p and other constructs requiring
2182 a constant argument to be visible in the inlined function body.
2183
110cfe1c 2184 FIXME: This usually kills the last connection between the inlined
2185 function parameter and the actual value in debug info. Can we do
2186 better here? If we just inserted the statement, copy propagation
2187 would kill it anyway as it always did in older versions of GCC.
2188
2189 We might want to introduce a notion that single SSA_NAME might
2190 represent multiple variables for purposes of debugging. */
2191 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
8454d27e 2192 && (optimize
2193 || (TREE_READONLY (p)
2194 && is_gimple_min_invariant (rhs)))
110cfe1c 2195 && (TREE_CODE (rhs) == SSA_NAME
9b718f81 2196 || is_gimple_min_invariant (rhs))
2197 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
110cfe1c 2198 {
2199 insert_decl_map (id, def, rhs);
0f1961a2 2200 return NULL;
110cfe1c 2201 }
2202
f6f2da7d 2203 /* If the value of the argument is never used, don't bother
2204 initializing it. */
1cf5abb3 2205 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
f6f2da7d 2206 {
2207 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
0f1961a2 2208 return NULL;
f6f2da7d 2209 }
2210
6de9cd9a 2211 /* Initialize this VAR_DECL from the equivalent argument. Convert
2212 the argument to the proper type in case it was promoted. */
2213 if (value)
2214 {
6de9cd9a 2215 if (rhs == error_mark_node)
110cfe1c 2216 {
7c7d3047 2217 insert_decl_map (id, p, var);
0f1961a2 2218 return NULL;
110cfe1c 2219 }
afe08db5 2220
73dab33b 2221 STRIP_USELESS_TYPE_CONVERSION (rhs);
6de9cd9a 2222
726a989a 2223 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
6de9cd9a 2224 keep our trees in gimple form. */
110cfe1c 2225 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2226 {
2227 def = remap_ssa_name (def, id);
726a989a 2228 init_stmt = gimple_build_assign (def, rhs);
110cfe1c 2229 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2230 set_default_def (var, NULL);
2231 }
2232 else
726a989a 2233 init_stmt = gimple_build_assign (var, rhs);
6de9cd9a 2234
0f1961a2 2235 if (bb && init_stmt)
2236 insert_init_stmt (bb, init_stmt);
6de9cd9a 2237 }
0f1961a2 2238 return init_stmt;
6de9cd9a 2239}
2240
d4e4baa9 2241/* Generate code to initialize the parameters of the function at the
726a989a 2242 top of the stack in ID from the GIMPLE_CALL STMT. */
d4e4baa9 2243
e21aff8a 2244static void
726a989a 2245initialize_inlined_parameters (copy_body_data *id, gimple stmt,
e21aff8a 2246 tree fn, basic_block bb)
d4e4baa9 2247{
d4e4baa9 2248 tree parms;
726a989a 2249 size_t i;
d4e4baa9 2250 tree p;
d436bff8 2251 tree vars = NULL_TREE;
726a989a 2252 tree static_chain = gimple_call_chain (stmt);
d4e4baa9 2253
2254 /* Figure out what the parameters are. */
18c6ada9 2255 parms = DECL_ARGUMENTS (fn);
d4e4baa9 2256
d4e4baa9 2257 /* Loop through the parameter declarations, replacing each with an
2258 equivalent VAR_DECL, appropriately initialized. */
726a989a 2259 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2260 {
2261 tree val;
2262 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2263 setup_one_parameter (id, p, val, fn, bb, &vars);
2264 }
4838c5ee 2265
6de9cd9a 2266 /* Initialize the static chain. */
2267 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
ea99e0be 2268 gcc_assert (fn != current_function_decl);
6de9cd9a 2269 if (p)
2270 {
2271 /* No static chain? Seems like a bug in tree-nested.c. */
1e128c5f 2272 gcc_assert (static_chain);
4838c5ee 2273
e21aff8a 2274 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
4838c5ee 2275 }
2276
e21aff8a 2277 declare_inline_vars (id->block, vars);
d4e4baa9 2278}
2279
726a989a 2280
e21aff8a 2281/* Declare a return variable to replace the RESULT_DECL for the
2282 function we are calling. An appropriate DECL_STMT is returned.
2283 The USE_STMT is filled to contain a use of the declaration to
2284 indicate the return value of the function.
2285
110cfe1c 2286 RETURN_SLOT, if non-null, is the place where to store the result. It
2287 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
726a989a 2288 was the LHS of the MODIFY_EXPR to which this call is the RHS.
7740f00d 2289
2290 The return value is a (possibly null) value that is the result of the
2291 function as seen by the callee. *USE_P is a (possibly null) value that
2292 holds the result as seen by the caller. */
d4e4baa9 2293
d436bff8 2294static tree
110cfe1c 2295declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2296 tree *use_p)
d4e4baa9 2297{
1b369fae 2298 tree callee = id->src_fn;
2299 tree caller = id->dst_fn;
7740f00d 2300 tree result = DECL_RESULT (callee);
2301 tree callee_type = TREE_TYPE (result);
2302 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2303 tree var, use;
d4e4baa9 2304
2305 /* We don't need to do anything for functions that don't return
2306 anything. */
7740f00d 2307 if (!result || VOID_TYPE_P (callee_type))
d4e4baa9 2308 {
6de9cd9a 2309 *use_p = NULL_TREE;
d4e4baa9 2310 return NULL_TREE;
2311 }
2312
cc77ae10 2313 /* If there was a return slot, then the return value is the
7740f00d 2314 dereferenced address of that object. */
110cfe1c 2315 if (return_slot)
7740f00d 2316 {
110cfe1c 2317 /* The front end shouldn't have used both return_slot and
7740f00d 2318 a modify expression. */
1e128c5f 2319 gcc_assert (!modify_dest);
cc77ae10 2320 if (DECL_BY_REFERENCE (result))
110cfe1c 2321 {
2322 tree return_slot_addr = build_fold_addr_expr (return_slot);
2323 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2324
2325 /* We are going to construct *&return_slot and we can't do that
2326 for variables believed to be not addressable.
2327
2328 FIXME: This check possibly can match, because values returned
2329 via return slot optimization are not believed to have address
2330 taken by alias analysis. */
2331 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2332 if (gimple_in_ssa_p (cfun))
2333 {
2334 HOST_WIDE_INT bitsize;
2335 HOST_WIDE_INT bitpos;
2336 tree offset;
2337 enum machine_mode mode;
2338 int unsignedp;
2339 int volatilep;
2340 tree base;
2341 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2342 &offset,
2343 &mode, &unsignedp, &volatilep,
2344 false);
2345 if (TREE_CODE (base) == INDIRECT_REF)
2346 base = TREE_OPERAND (base, 0);
2347 if (TREE_CODE (base) == SSA_NAME)
2348 base = SSA_NAME_VAR (base);
2349 mark_sym_for_renaming (base);
2350 }
2351 var = return_slot_addr;
2352 }
cc77ae10 2353 else
110cfe1c 2354 {
2355 var = return_slot;
2356 gcc_assert (TREE_CODE (var) != SSA_NAME);
b5ca517c 2357 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
110cfe1c 2358 }
0890b981 2359 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2360 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2361 && !DECL_GIMPLE_REG_P (result)
22918034 2362 && DECL_P (var))
0890b981 2363 DECL_GIMPLE_REG_P (var) = 0;
7740f00d 2364 use = NULL;
2365 goto done;
2366 }
2367
2368 /* All types requiring non-trivial constructors should have been handled. */
1e128c5f 2369 gcc_assert (!TREE_ADDRESSABLE (callee_type));
7740f00d 2370
2371 /* Attempt to avoid creating a new temporary variable. */
110cfe1c 2372 if (modify_dest
2373 && TREE_CODE (modify_dest) != SSA_NAME)
7740f00d 2374 {
2375 bool use_it = false;
2376
2377 /* We can't use MODIFY_DEST if there's type promotion involved. */
f4088621 2378 if (!useless_type_conversion_p (callee_type, caller_type))
7740f00d 2379 use_it = false;
2380
2381 /* ??? If we're assigning to a variable sized type, then we must
2382 reuse the destination variable, because we've no good way to
2383 create variable sized temporaries at this point. */
2384 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2385 use_it = true;
2386
2387 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2388 reuse it as the result of the call directly. Don't do this if
2389 it would promote MODIFY_DEST to addressable. */
e2f9fe42 2390 else if (TREE_ADDRESSABLE (result))
2391 use_it = false;
2392 else
2393 {
2394 tree base_m = get_base_address (modify_dest);
2395
2396 /* If the base isn't a decl, then it's a pointer, and we don't
2397 know where that's going to go. */
2398 if (!DECL_P (base_m))
2399 use_it = false;
2400 else if (is_global_var (base_m))
2401 use_it = false;
0890b981 2402 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2403 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2404 && !DECL_GIMPLE_REG_P (result)
2405 && DECL_GIMPLE_REG_P (base_m))
1d327c16 2406 use_it = false;
e2f9fe42 2407 else if (!TREE_ADDRESSABLE (base_m))
2408 use_it = true;
2409 }
7740f00d 2410
2411 if (use_it)
2412 {
2413 var = modify_dest;
2414 use = NULL;
2415 goto done;
2416 }
2417 }
2418
1e128c5f 2419 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
7740f00d 2420
c08cd4c1 2421 var = copy_result_decl_to_var (result, id);
110cfe1c 2422 if (gimple_in_ssa_p (cfun))
2423 {
2424 get_var_ann (var);
2425 add_referenced_var (var);
2426 }
e21aff8a 2427
7740f00d 2428 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
cb91fab0 2429 DECL_STRUCT_FUNCTION (caller)->local_decls
7740f00d 2430 = tree_cons (NULL_TREE, var,
cb91fab0 2431 DECL_STRUCT_FUNCTION (caller)->local_decls);
7740f00d 2432
6de9cd9a 2433 /* Do not have the rest of GCC warn about this variable as it should
471854f8 2434 not be visible to the user. */
6de9cd9a 2435 TREE_NO_WARNING (var) = 1;
d4e4baa9 2436
c08cd4c1 2437 declare_inline_vars (id->block, var);
2438
7740f00d 2439 /* Build the use expr. If the return type of the function was
2440 promoted, convert it back to the expected type. */
2441 use = var;
f4088621 2442 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
7740f00d 2443 use = fold_convert (caller_type, var);
73dab33b 2444
2445 STRIP_USELESS_TYPE_CONVERSION (use);
7740f00d 2446
c08cd4c1 2447 if (DECL_BY_REFERENCE (result))
32848948 2448 {
2449 TREE_ADDRESSABLE (var) = 1;
2450 var = build_fold_addr_expr (var);
2451 }
c08cd4c1 2452
7740f00d 2453 done:
d4e4baa9 2454 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2455 way, when the RESULT_DECL is encountered, it will be
2456 automatically replaced by the VAR_DECL. */
5e20bdd7 2457 insert_decl_map (id, result, var);
d4e4baa9 2458
6de9cd9a 2459 /* Remember this so we can ignore it in remap_decls. */
2460 id->retvar = var;
2461
7740f00d 2462 *use_p = use;
2463 return var;
d4e4baa9 2464}
2465
27dbd3ac 2466/* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2467 to a local label. */
4838c5ee 2468
27dbd3ac 2469static tree
2470has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
4838c5ee 2471{
27dbd3ac 2472 tree node = *nodep;
2473 tree fn = (tree) fnp;
726a989a 2474
27dbd3ac 2475 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2476 return node;
2477
2478 if (TYPE_P (node))
2479 *walk_subtrees = 0;
2480
2481 return NULL_TREE;
2482}
726a989a 2483
27dbd3ac 2484/* Callback through walk_tree. Determine if we've got an aggregate
2485 type that we can't support; return non-null if so. */
726a989a 2486
2487static tree
27dbd3ac 2488cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2489 void *data ATTRIBUTE_UNUSED)
726a989a 2490{
27dbd3ac 2491 tree t, node = *nodep;
726a989a 2492
2493 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2494 {
2495 /* We cannot inline a function of the form
2496
2497 void F (int i) { struct S { int ar[i]; } s; }
2498
2499 Attempting to do so produces a catch-22.
2500 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2501 UNION_TYPE nodes, then it goes into infinite recursion on a
2502 structure containing a pointer to its own type. If it doesn't,
2503 then the type node for S doesn't get adjusted properly when
2504 F is inlined.
2505
2506 ??? This is likely no longer true, but it's too late in the 4.0
2507 cycle to try to find out. This should be checked for 4.1. */
2508 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2509 if (variably_modified_type_p (TREE_TYPE (t), NULL))
27dbd3ac 2510 return node;
726a989a 2511 }
2512
2513 return NULL_TREE;
4838c5ee 2514}
2515
726a989a 2516
27dbd3ac 2517/* Determine if the function can be copied. If so return NULL. If
2518 not return a string describing the reason for failure. */
2519
2520static const char *
2521copy_forbidden (struct function *fun, tree fndecl)
2522{
2523 const char *reason = fun->cannot_be_copied_reason;
2524 tree step;
2525
2526 /* Only examine the function once. */
2527 if (fun->cannot_be_copied_set)
2528 return reason;
2529
2530 /* We cannot copy a function that receives a non-local goto
2531 because we cannot remap the destination label used in the
2532 function that is performing the non-local goto. */
2533 /* ??? Actually, this should be possible, if we work at it.
2534 No doubt there's just a handful of places that simply
2535 assume it doesn't happen and don't substitute properly. */
2536 if (fun->has_nonlocal_label)
2537 {
2538 reason = G_("function %q+F can never be copied "
2539 "because it receives a non-local goto");
2540 goto fail;
2541 }
2542
2543 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2544 {
2545 tree decl = TREE_VALUE (step);
2546
2547 if (TREE_CODE (decl) == VAR_DECL
2548 && TREE_STATIC (decl)
2549 && !DECL_EXTERNAL (decl)
2550 && DECL_INITIAL (decl)
2551 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2552 has_label_address_in_static_1,
2553 fndecl))
2554 {
2555 reason = G_("function %q+F can never be copied because it saves "
2556 "address of local label in a static variable");
2557 goto fail;
2558 }
2559
2560 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2561 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2562 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2563 cannot_copy_type_1, NULL))
2564 {
2565 reason = G_("function %q+F can never be copied "
2566 "because it uses variable sized variables");
2567 goto fail;
2568 }
2569 }
2570
2571 fail:
2572 fun->cannot_be_copied_reason = reason;
2573 fun->cannot_be_copied_set = true;
2574 return reason;
2575}
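/* Editorial note: the verdict is memoized in cannot_be_copied_set and
   cannot_be_copied_reason, so the walk over local_decls above runs at
   most once per function no matter how often callers ask. */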
2576
2577
2578static const char *inline_forbidden_reason;
2579
2580/* A callback for walk_gimple_seq to handle statements. Returns non-null
2581 iff a function cannot be inlined. Also sets the reason why. */
c986baf6 2582
c986baf6 2583static tree
726a989a 2584inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2585 struct walk_stmt_info *wip)
c986baf6 2586{
726a989a 2587 tree fn = (tree) wip->info;
f08545a8 2588 tree t;
726a989a 2589 gimple stmt = gsi_stmt (*gsi);
c986baf6 2590
726a989a 2591 switch (gimple_code (stmt))
f08545a8 2592 {
726a989a 2593 case GIMPLE_CALL:
3197c4fd 2594 /* Refuse to inline an alloca call unless the user explicitly forced
2595 it, as this may drastically change the program's memory overhead when
2596 the function using alloca is called in a loop. In the GCC source
2597 included in SPEC2000, inlining into schedule_block caused it to
2598 require 2GB of RAM instead of 256MB. */
726a989a 2599 if (gimple_alloca_call_p (stmt)
f08545a8 2600 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2601 {
ddd2d57e 2602 inline_forbidden_reason
dee15844 2603 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 2604 "alloca (override using the always_inline attribute)");
726a989a 2605 *handled_ops_p = true;
2606 return fn;
f08545a8 2607 }
726a989a 2608
2609 t = gimple_call_fndecl (stmt);
2610 if (t == NULL_TREE)
f08545a8 2611 break;
84f5e1b1 2612
f08545a8 2613 /* We cannot inline functions that call setjmp. */
2614 if (setjmp_call_p (t))
2615 {
ddd2d57e 2616 inline_forbidden_reason
dee15844 2617 = G_("function %q+F can never be inlined because it uses setjmp");
726a989a 2618 *handled_ops_p = true;
2619 return t;
f08545a8 2620 }
2621
6de9cd9a 2622 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 2623 switch (DECL_FUNCTION_CODE (t))
f08545a8 2624 {
3197c4fd 2625 /* We cannot inline functions that take a variable number of
2626 arguments. */
2627 case BUILT_IN_VA_START:
3197c4fd 2628 case BUILT_IN_NEXT_ARG:
2629 case BUILT_IN_VA_END:
6de9cd9a 2630 inline_forbidden_reason
dee15844 2631 = G_("function %q+F can never be inlined because it "
6de9cd9a 2632 "uses variable argument lists");
726a989a 2633 *handled_ops_p = true;
2634 return t;
6de9cd9a 2635
3197c4fd 2636 case BUILT_IN_LONGJMP:
6de9cd9a 2637 /* We can't inline functions that call __builtin_longjmp at
2638 all. The non-local goto machinery really requires the
2639 destination be in a different function. If we allow the
2640 function calling __builtin_longjmp to be inlined into the
2641 function calling __builtin_setjmp, Things will Go Awry. */
2642 inline_forbidden_reason
dee15844 2643 = G_("function %q+F can never be inlined because "
6de9cd9a 2644 "it uses setjmp-longjmp exception handling");
726a989a 2645 *handled_ops_p = true;
2646 return t;
6de9cd9a 2647
2648 case BUILT_IN_NONLOCAL_GOTO:
2649 /* Similarly. */
2650 inline_forbidden_reason
dee15844 2651 = G_("function %q+F can never be inlined because "
6de9cd9a 2652 "it uses non-local goto");
726a989a 2653 *handled_ops_p = true;
2654 return t;
f08545a8 2655
4b284111 2656 case BUILT_IN_RETURN:
2657 case BUILT_IN_APPLY_ARGS:
2658 /* If a __builtin_apply_args caller would be inlined,
2659 it would be saving arguments of the function it has
2660 been inlined into. Similarly __builtin_return would
2661 return from the function the inline has been inlined into. */
2662 inline_forbidden_reason
dee15844 2663 = G_("function %q+F can never be inlined because "
4b284111 2664 "it uses __builtin_return or __builtin_apply_args");
726a989a 2665 *handled_ops_p = true;
2666 return t;
4b284111 2667
3197c4fd 2668 default:
2669 break;
2670 }
f08545a8 2671 break;
2672
726a989a 2673 case GIMPLE_GOTO:
2674 t = gimple_goto_dest (stmt);
f08545a8 2675
2676 /* We will not inline a function which uses computed goto. The
2677 addresses of its local labels, which may be tucked into
2678 global storage, are of course not constant across
2679 instantiations, which causes unexpected behavior. */
2680 if (TREE_CODE (t) != LABEL_DECL)
2681 {
ddd2d57e 2682 inline_forbidden_reason
dee15844 2683 = G_("function %q+F can never be inlined "
ddd2d57e 2684 "because it contains a computed goto");
726a989a 2685 *handled_ops_p = true;
2686 return t;
f08545a8 2687 }
6de9cd9a 2688 break;
f08545a8 2689
f08545a8 2690 default:
2691 break;
2692 }
2693
726a989a 2694 *handled_ops_p = false;
f08545a8 2695 return NULL_TREE;
84f5e1b1 2696}
2697
726a989a 2698/* Return true if FNDECL is a function that cannot be inlined into
2699 another one. */
2700
2701static bool
f08545a8 2702inline_forbidden_p (tree fndecl)
84f5e1b1 2703{
2092ee7d 2704 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
726a989a 2705 struct walk_stmt_info wi;
2706 struct pointer_set_t *visited_nodes;
2707 basic_block bb;
2708 bool forbidden_p = false;
2709
27dbd3ac 2710 /* First check for shared reasons not to copy the code. */
2711 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2712 if (inline_forbidden_reason != NULL)
2713 return true;
2714
2715 /* Next, walk the statements of the function looking for
2716 constructs we can't handle or that are non-optimal for inlining. */
726a989a 2717 visited_nodes = pointer_set_create ();
2718 memset (&wi, 0, sizeof (wi));
2719 wi.info = (void *) fndecl;
2720 wi.pset = visited_nodes;
e21aff8a 2721
2092ee7d 2722 FOR_EACH_BB_FN (bb, fun)
726a989a 2723 {
2724 gimple ret;
2725 gimple_seq seq = bb_seq (bb);
27dbd3ac 2726 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
726a989a 2727 forbidden_p = (ret != NULL);
2728 if (forbidden_p)
27dbd3ac 2729 break;
2092ee7d 2730 }
2731
726a989a 2732 pointer_set_destroy (visited_nodes);
726a989a 2733 return forbidden_p;
84f5e1b1 2734}
2735
b3c3af2f 2736/* Returns nonzero if FN is a function that does not have any
2737 fundamental inline blocking properties. */
d4e4baa9 2738
27dbd3ac 2739bool
2740tree_inlinable_function_p (tree fn)
d4e4baa9 2741{
b3c3af2f 2742 bool inlinable = true;
18177c7e 2743 bool do_warning;
2744 tree always_inline;
d4e4baa9 2745
2746 /* If we've already decided this function shouldn't be inlined,
2747 there's no need to check again. */
2748 if (DECL_UNINLINABLE (fn))
b3c3af2f 2749 return false;
d4e4baa9 2750
18177c7e 2751 /* We only warn for functions declared `inline' by the user. */
2752 do_warning = (warn_inline
18177c7e 2753 && DECL_DECLARED_INLINE_P (fn)
0494626a 2754 && !DECL_NO_INLINE_WARNING_P (fn)
18177c7e 2755 && !DECL_IN_SYSTEM_HEADER (fn));
2756
2757 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2758
e90acd93 2759 if (flag_no_inline
18177c7e 2760 && always_inline == NULL)
2761 {
2762 if (do_warning)
2763 warning (OPT_Winline, "function %q+F can never be inlined because it "
2764 "is suppressed using -fno-inline", fn);
2765 inlinable = false;
2766 }
2767
2768 /* Don't auto-inline anything that might not be bound within
2769 this unit of translation. */
2770 else if (!DECL_DECLARED_INLINE_P (fn)
2771 && DECL_REPLACEABLE_P (fn))
2772 inlinable = false;
2773
2774 else if (!function_attribute_inlinable_p (fn))
2775 {
2776 if (do_warning)
2777 warning (OPT_Winline, "function %q+F can never be inlined because it "
2778 "uses attributes conflicting with inlining", fn);
2779 inlinable = false;
2780 }
46c5ad27 2781
f08545a8 2782 else if (inline_forbidden_p (fn))
b3c3af2f 2783 {
2784 /* See if we should warn about uninlinable functions. Previously,
2785 some of these warnings would be issued while trying to expand
2786 the function inline, but that would cause multiple warnings
2787 about functions that would for example call alloca. But since
2788 this a property of the function, just one warning is enough.
2789 As a bonus we can now give more details about the reason why a
18177c7e 2790 function is not inlinable. */
2791 if (always_inline)
dee15844 2792 sorry (inline_forbidden_reason, fn);
2d327012 2793 else if (do_warning)
d2fcbf6f 2794 warning (OPT_Winline, inline_forbidden_reason, fn);
b3c3af2f 2795
2796 inlinable = false;
2797 }
d4e4baa9 2798
2799 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 2800 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 2801
b3c3af2f 2802 return inlinable;
2803}
2804
e5c4f28a 2805/* Estimate the cost of a memory move. Use machine dependent
2806 word size and take possible memcpy call into account. */
2807
2808int
2809estimate_move_cost (tree type)
2810{
2811 HOST_WIDE_INT size;
2812
078c3644 2813 gcc_assert (!VOID_TYPE_P (type));
2814
e5c4f28a 2815 size = int_size_in_bytes (type);
2816
e04ad03d 2817 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
e5c4f28a 2818 /* Cost of a memcpy call, 3 arguments and the call. */
2819 return 4;
2820 else
2821 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
2822}
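/* Worked example (illustrative target values, not in the original
   source): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 24-byte
   struct costs (24 + 8 - 1) / 8 == 3 units, while a 64-byte move
   exceeds the 8 * 4 == 32 byte threshold and is charged the flat
   memcpy cost of 4. */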
2823
726a989a 2824/* Returns cost of operation CODE, according to WEIGHTS */
7f9bc51b 2825
726a989a 2826static int
02f0b13a 2827estimate_operator_cost (enum tree_code code, eni_weights *weights,
2828 tree op1 ATTRIBUTE_UNUSED, tree op2)
6de9cd9a 2829{
726a989a 2830 switch (code)
6de9cd9a 2831 {
726a989a 2832 /* These are "free" conversions, or their presumed cost
2833 is folded into other operations. */
61fcaeec 2834 case RANGE_EXPR:
1a87cf0c 2835 CASE_CONVERT:
726a989a 2836 case COMPLEX_EXPR:
2837 case PAREN_EXPR:
726a989a 2838 return 0;
6de9cd9a 2839
e5c4f28a 2840 /* Assign cost of 1 to usual operations.
2841 ??? We may consider mapping RTL costs to this. */
6de9cd9a 2842 case COND_EXPR:
4151978d 2843 case VEC_COND_EXPR:
6de9cd9a 2844
2845 case PLUS_EXPR:
5be014d5 2846 case POINTER_PLUS_EXPR:
6de9cd9a 2847 case MINUS_EXPR:
2848 case MULT_EXPR:
2849
325217ed 2850 case FIXED_CONVERT_EXPR:
6de9cd9a 2851 case FIX_TRUNC_EXPR:
6de9cd9a 2852
2853 case NEGATE_EXPR:
2854 case FLOAT_EXPR:
2855 case MIN_EXPR:
2856 case MAX_EXPR:
2857 case ABS_EXPR:
2858
2859 case LSHIFT_EXPR:
2860 case RSHIFT_EXPR:
2861 case LROTATE_EXPR:
2862 case RROTATE_EXPR:
a6b46ba2 2863 case VEC_LSHIFT_EXPR:
2864 case VEC_RSHIFT_EXPR:
6de9cd9a 2865
2866 case BIT_IOR_EXPR:
2867 case BIT_XOR_EXPR:
2868 case BIT_AND_EXPR:
2869 case BIT_NOT_EXPR:
2870
2871 case TRUTH_ANDIF_EXPR:
2872 case TRUTH_ORIF_EXPR:
2873 case TRUTH_AND_EXPR:
2874 case TRUTH_OR_EXPR:
2875 case TRUTH_XOR_EXPR:
2876 case TRUTH_NOT_EXPR:
2877
2878 case LT_EXPR:
2879 case LE_EXPR:
2880 case GT_EXPR:
2881 case GE_EXPR:
2882 case EQ_EXPR:
2883 case NE_EXPR:
2884 case ORDERED_EXPR:
2885 case UNORDERED_EXPR:
2886
2887 case UNLT_EXPR:
2888 case UNLE_EXPR:
2889 case UNGT_EXPR:
2890 case UNGE_EXPR:
2891 case UNEQ_EXPR:
d1a7edaf 2892 case LTGT_EXPR:
6de9cd9a 2893
6de9cd9a 2894 case CONJ_EXPR:
2895
2896 case PREDECREMENT_EXPR:
2897 case PREINCREMENT_EXPR:
2898 case POSTDECREMENT_EXPR:
2899 case POSTINCREMENT_EXPR:
2900
16630a2c 2901 case REALIGN_LOAD_EXPR:
2902
61d3cdbb 2903 case REDUC_MAX_EXPR:
2904 case REDUC_MIN_EXPR:
2905 case REDUC_PLUS_EXPR:
20f06221 2906 case WIDEN_SUM_EXPR:
726a989a 2907 case WIDEN_MULT_EXPR:
2908 case DOT_PROD_EXPR:
2909
89d67cca 2910 case VEC_WIDEN_MULT_HI_EXPR:
2911 case VEC_WIDEN_MULT_LO_EXPR:
2912 case VEC_UNPACK_HI_EXPR:
2913 case VEC_UNPACK_LO_EXPR:
d9987fb4 2914 case VEC_UNPACK_FLOAT_HI_EXPR:
2915 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 2916 case VEC_PACK_TRUNC_EXPR:
89d67cca 2917 case VEC_PACK_SAT_EXPR:
d9987fb4 2918 case VEC_PACK_FIX_TRUNC_EXPR:
98b44b0e 2919 case VEC_EXTRACT_EVEN_EXPR:
2920 case VEC_EXTRACT_ODD_EXPR:
2921 case VEC_INTERLEAVE_HIGH_EXPR:
2922 case VEC_INTERLEAVE_LOW_EXPR:
2923
726a989a 2924 return 1;
6de9cd9a 2925
1ea7e6ad 2926 /* A few special cases of expensive operations. This is useful
6de9cd9a 2927 to avoid inlining functions having too many of these. */
2928 case TRUNC_DIV_EXPR:
2929 case CEIL_DIV_EXPR:
2930 case FLOOR_DIV_EXPR:
2931 case ROUND_DIV_EXPR:
2932 case EXACT_DIV_EXPR:
2933 case TRUNC_MOD_EXPR:
2934 case CEIL_MOD_EXPR:
2935 case FLOOR_MOD_EXPR:
2936 case ROUND_MOD_EXPR:
2937 case RDIV_EXPR:
02f0b13a 2938 if (TREE_CODE (op2) != INTEGER_CST)
2939 return weights->div_mod_cost;
2940 return 1;
726a989a 2941
2942 default:
2943 /* We expect a copy assignment with no operator. */
2944 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
2945 return 0;
2946 }
2947}
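/* Example (illustrative, not in the original source): a division
   "x / y" with a non-constant divisor is charged weights->div_mod_cost,
   whereas "x / 8" passes the INTEGER_CST check above and costs 1 like
   ordinary arithmetic. */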
2948
2949
2950/* Estimate number of instructions that will be created by expanding
2951 the statements in the statement sequence STMTS.
2952 WEIGHTS contains weights attributed to various constructs. */
2953
2954static
2955int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
2956{
2957 int cost;
2958 gimple_stmt_iterator gsi;
2959
2960 cost = 0;
2961 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
2962 cost += estimate_num_insns (gsi_stmt (gsi), weights);
2963
2964 return cost;
2965}
2966
2967
2968/* Estimate number of instructions that will be created by expanding STMT.
2969 WEIGHTS contains weights attributed to various constructs. */
2970
2971int
2972estimate_num_insns (gimple stmt, eni_weights *weights)
2973{
2974 unsigned cost, i;
2975 enum gimple_code code = gimple_code (stmt);
2976 tree lhs;
02f0b13a 2977 tree rhs;
726a989a 2978
2979 switch (code)
2980 {
2981 case GIMPLE_ASSIGN:
2982 /* Try to estimate the cost of assignments. We have three cases to
2983 deal with:
2984 1) Simple assignments to registers;
2985 2) Stores to things that must live in memory. This includes
2986 "normal" stores to scalars, but also assignments of large
2987 structures, or constructors of big arrays;
2988
2989 Let us look at the first two cases, assuming we have "a = b + C":
2990 <GIMPLE_ASSIGN <var_decl "a">
2991 <plus_expr <var_decl "b"> <constant C>>
2992 If "a" is a GIMPLE register, the assignment to it is free on almost
2993 any target, because "a" usually ends up in a real register. Hence
2994 the only cost of this expression comes from the PLUS_EXPR, and we
2995 can ignore the GIMPLE_ASSIGN.
2996 If "a" is not a GIMPLE register, the assignment to "a" will most
2997 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
2998 of moving something into "a", which we compute using the function
2999 estimate_move_cost. */
3000 lhs = gimple_assign_lhs (stmt);
02f0b13a 3001 rhs = gimple_assign_rhs1 (stmt);
3002
3003 /* EH magic stuff is most probably going to be optimized out.
3004 We rarely really need to save EH info for unwinding
3005 nested exceptions. */
3006 if (TREE_CODE (lhs) == FILTER_EXPR
3007 || TREE_CODE (lhs) == EXC_PTR_EXPR
3008 || TREE_CODE (rhs) == FILTER_EXPR
3009 || TREE_CODE (rhs) == EXC_PTR_EXPR)
3010 return 0;
726a989a 3011 if (is_gimple_reg (lhs))
3012 cost = 0;
3013 else
3014 cost = estimate_move_cost (TREE_TYPE (lhs));
3015
02f0b13a 3016 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3017 cost += estimate_move_cost (TREE_TYPE (rhs));
3018
3019 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3020 gimple_assign_rhs1 (stmt),
3021 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3022 == GIMPLE_BINARY_RHS
3023 ? gimple_assign_rhs2 (stmt) : NULL);
726a989a 3024 break;
3025
3026 case GIMPLE_COND:
02f0b13a 3027 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3028 gimple_op (stmt, 0),
3029 gimple_op (stmt, 1));
726a989a 3030 break;
3031
3032 case GIMPLE_SWITCH:
3033 /* Take into account cost of the switch + guess 2 conditional jumps for
3034 each case label.
3035
3036 TODO: once the switch expansion logic is sufficiently separated, we can
3037 do better job on estimating cost of the switch. */
02f0b13a 3038 if (weights->time_based)
3039 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3040 else
3041 cost = gimple_switch_num_labels (stmt) * 2;
6de9cd9a 3042 break;
726a989a 3043
3044 case GIMPLE_CALL:
6de9cd9a 3045 {
726a989a 3046 tree decl = gimple_call_fndecl (stmt);
3047 tree addr = gimple_call_fn (stmt);
8723e2fe 3048 tree funtype = TREE_TYPE (addr);
3049
726a989a 3050 if (POINTER_TYPE_P (funtype))
3051 funtype = TREE_TYPE (funtype);
6de9cd9a 3052
625a2efb 3053 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
726a989a 3054 cost = weights->target_builtin_call_cost;
625a2efb 3055 else
726a989a 3056 cost = weights->call_cost;
625a2efb 3057
8c96cd51 3058 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
6de9cd9a 3059 switch (DECL_FUNCTION_CODE (decl))
3060 {
3061 case BUILT_IN_CONSTANT_P:
726a989a 3062 return 0;
6de9cd9a 3063 case BUILT_IN_EXPECT:
02f0b13a 3064 return 0;
726a989a 3065
7f9bc51b 3066 /* Prefetch instruction is not expensive. */
3067 case BUILT_IN_PREFETCH:
726a989a 3068 cost = weights->target_builtin_call_cost;
7f9bc51b 3069 break;
726a989a 3070
6de9cd9a 3071 default:
3072 break;
3073 }
e5c4f28a 3074
8723e2fe 3075 if (decl)
3076 funtype = TREE_TYPE (decl);
3077
02f0b13a 3078 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3079 cost += estimate_move_cost (TREE_TYPE (funtype));
726a989a 3080 /* Our cost must be kept in sync with
3081 cgraph_estimate_size_after_inlining that does use function
3082 declaration to figure out the arguments. */
8723e2fe 3083 if (decl && DECL_ARGUMENTS (decl))
3084 {
3085 tree arg;
3086 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
078c3644 3087 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3088 cost += estimate_move_cost (TREE_TYPE (arg));
8723e2fe 3089 }
3090 else if (funtype && prototype_p (funtype))
3091 {
3092 tree t;
078c3644 3093 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3094 t = TREE_CHAIN (t))
3095 if (!VOID_TYPE_P (TREE_VALUE (t)))
3096 cost += estimate_move_cost (TREE_VALUE (t));
8723e2fe 3097 }
3098 else
c7f599d0 3099 {
726a989a 3100 for (i = 0; i < gimple_call_num_args (stmt); i++)
3101 {
3102 tree arg = gimple_call_arg (stmt, i);
078c3644 3103 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3104 cost += estimate_move_cost (TREE_TYPE (arg));
726a989a 3105 }
c7f599d0 3106 }
e5c4f28a 3107
6de9cd9a 3108 break;
3109 }
88f4034b 3110
726a989a 3111 case GIMPLE_GOTO:
3112 case GIMPLE_LABEL:
3113 case GIMPLE_NOP:
3114 case GIMPLE_PHI:
3115 case GIMPLE_RETURN:
726a989a 3116 case GIMPLE_PREDICT:
3117 return 0;
3118
3119 case GIMPLE_ASM:
3120 case GIMPLE_RESX:
3121 return 1;
3122
3123 case GIMPLE_BIND:
3124 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3125
3126 case GIMPLE_EH_FILTER:
3127 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3128
3129 case GIMPLE_CATCH:
3130 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3131
3132 case GIMPLE_TRY:
3133 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3134 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3135
3136 /* OpenMP directives are generally very expensive. */
3137
3138 case GIMPLE_OMP_RETURN:
3139 case GIMPLE_OMP_SECTIONS_SWITCH:
3140 case GIMPLE_OMP_ATOMIC_STORE:
3141 case GIMPLE_OMP_CONTINUE:
3142 /* ...except these, which are cheap. */
3143 return 0;
3144
3145 case GIMPLE_OMP_ATOMIC_LOAD:
3146 return weights->omp_cost;
3147
3148 case GIMPLE_OMP_FOR:
3149 return (weights->omp_cost
3150 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3151 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3152
3153 case GIMPLE_OMP_PARALLEL:
3154 case GIMPLE_OMP_TASK:
3155 case GIMPLE_OMP_CRITICAL:
3156 case GIMPLE_OMP_MASTER:
3157 case GIMPLE_OMP_ORDERED:
3158 case GIMPLE_OMP_SECTION:
3159 case GIMPLE_OMP_SECTIONS:
3160 case GIMPLE_OMP_SINGLE:
3161 return (weights->omp_cost
3162 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 3163
6de9cd9a 3164 default:
1e128c5f 3165 gcc_unreachable ();
6de9cd9a 3166 }
726a989a
RB
3167
3168 return cost;
6de9cd9a
DN
3169}
3170
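/* A worked example of the GIMPLE_CALL case above (a sketch; exact move
   costs are target dependent): for a prototyped call 'a = f (x, y)'
   where f returns int and takes two ints, the estimate is

     cost = weights->call_cost
	    + estimate_move_cost (int type)        <- return value
	    + 2 * estimate_move_cost (int type)    <- the two arguments

   so with eni_time_weights (see init_inline_once below) the call itself
   contributes 10, while with eni_size_weights it contributes 1, before
   the move costs are added in.  */
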
/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}


/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.time_based = false;

  /* Estimating time for a call is difficult, since we have no idea what
     the called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.target_builtin_call_cost = 10;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.time_based = true;
}

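/* A sketch of how the two weight sets are meant to be used (illustrative,
   not part of the original source): a caller computes both a size and a
   time estimate for the same body by walking it twice with different
   weights:

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);

   Only the weights differ; the traversal in estimate_num_insns_fn is
   identical in both cases.  */
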
/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}


/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}

/* Fetch callee declaration from the call graph edge going from NODE and
   associated with STMT call statement.  Return NULL_TREE if not found.  */

static tree
get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  if (cs)
    return cs->callee->decl;

  return NULL_TREE;
}

/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
{
  tree retvar, use_retvar;
  tree fn;
  struct pointer_map_t *st;
  tree return_slot;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = FALSE;
  bool purge_dead_abnormal_edges;
  tree t_step;
  tree var;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (gimple_code (stmt) != GIMPLE_CALL)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = gimple_call_fndecl (stmt);
  if (!fn)
    {
      fn = get_indirect_callee_fndecl (id->dst_node, stmt);
      if (!fn)
	goto egress;
    }

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, which are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective C and Fortran still call tree_rest_of_compilation directly.
     Kill this check once this is fixed.  */
  if (!id->dst_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->dst_node, stmt);

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      /* If this call was originally indirect, we do not want to emit any
	 inlining related warnings or sorry messages because there are no
	 guarantees regarding those.  */
      if (cg_edge->indirect_call)
	goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* Avoid warnings during early inline pass.  */
	  && cgraph_global_info_ready)
	{
	  sorry ("inlining failed in call to %q+F: %s", fn,
		 cgraph_inline_failed_string (reason));
	  sorry ("called from here");
	}
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && reason != CIF_UNSPECIFIED
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Avoid warnings during early inline pass.  */
	       && cgraph_global_info_ready)
	{
	  warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		   fn, cgraph_inline_failed_string (reason));
	  warning (OPT_Winline, "called from here");
	}
      goto egress;
    }
  fn = cg_edge->callee->decl;

#ifdef ENABLE_CHECKING
  if (cg_edge->callee->decl != id->dst_node->decl)
    verify_cgraph_node (cg_edge->callee);
#endif

  /* We will be inlining this callee.  */
  id->eh_region = lookup_stmt_eh_region (stmt);

  /* Split the block holding the GIMPLE_CALL.  */
  e = split_block (bb, stmt);
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* split_block splits after the statement; work around this by
     moving the call into the second block manually.  Not pretty,
     but seems easier than doing the CFG manipulation by hand
     when the GIMPLE_CALL is in the last statement of BB.  */
  stmt_gsi = gsi_last_bb (bb);
  gsi_remove (&stmt_gsi, false);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  if (gsi_end_p (gsi))
    {
      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = true;
    }
  else
    {
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      purge_dead_abnormal_edges = false;
    }

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.  */
  id->block = make_node (BLOCK);
  BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
  BLOCK_SOURCE_LOCATION (id->block) = input_location;
  prepend_lexical_block (gimple_block (stmt), id->block);

  /* Local declarations will be replaced by their equivalents in this
     map.  */
  st = id->decl_map;
  id->decl_map = pointer_map_create ();

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_node = cg_edge->callee;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->gimple_call = stmt;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
						   NOT_TAKEN),
			GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);

  if (DECL_INITIAL (fn))
    prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (stmt))
	{
	  return_slot = modify_dest;
	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);

  /* Add local vars in this inlined callee to caller.  */
  t_step = id->src_cfun->local_decls;
  for (; t_step; t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	{
	  if (var_ann (var) && add_referenced_var (var))
	    cfun->local_decls = tree_cons (NULL_TREE, var,
					   cfun->local_decls);
	}
      else if (!can_be_nonlocal (var, id))
	cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
				       cfun->local_decls);
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb->count, bb->frequency, bb, return_block);

  /* Reset the escaped and callused solutions.  */
  if (cfun->gimple_df)
    {
      pt_solution_reset (&cfun->gimple_df->escaped);
      pt_solution_reset (&cfun->gimple_df->callused);
    }

  /* Clean up.  */
  pointer_map_destroy (id->decl_map);
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gsi_replace (&stmt_gsi, stmt, false);
      if (gimple_in_ssa_p (cfun))
	mark_symbols_for_renaming (stmt);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
    }
  else
    {
      /* Handle the case of inlining a function with no return
	 statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
	{
	  tree name = gimple_call_lhs (stmt);
	  tree var = SSA_NAME_VAR (name);
	  tree def = gimple_default_def (cfun, var);

	  if (def)
	    {
	      /* If the variable is used undefined, make this name
		 undefined via a move.  */
	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
	      gsi_replace (&stmt_gsi, stmt, true);
	    }
	  else
	    {
	      /* Otherwise make this variable undefined.  */
	      gsi_remove (&stmt_gsi, true);
	      set_default_def (var, name);
	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
	    }
	}
      else
	gsi_remove (&stmt_gsi, true);
    }

  if (purge_dead_abnormal_edges)
    gimple_purge_dead_abnormal_call_edges (return_block);

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);

  id->block = NULL_TREE;
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}

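/* A rough picture of what expand_call_inline does to the CFG, for a
   caller block BB containing 'a = foo (x);' (illustrative sketch only):

     before:  BB:           ...stmts...; a = foo (x); ...rest...

     after:   BB:           ...stmts...
	      copied body:  foo's blocks, with its parameter bound to x
			    and each return rewritten into an assignment
			    to the return variable plus a jump
	      RETURN_BLOCK: a = USE_RETVAR; ...rest...

   If foo's body contains no return statement, the LHS is instead tied to
   the (undefined) default definition, as handled at the end of the
   function above.  */
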
/* Expand call statements reachable from basic block BB.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt)
	  && expand_call_inline (bb, stmt, id))
	return true;
    }

  return false;
}


/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, struct pointer_set_t *statements)
{
  for (; first < n_basic_blocks; first++)
    if (BASIC_BLOCK (first))
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (BASIC_BLOCK (first));
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  if (pointer_set_contains (statements, gsi_stmt (gsi)))
	    {
	      gimple old_stmt = gsi_stmt (gsi);
	      tree old_decl = is_gimple_call (old_stmt)
			      ? gimple_call_fndecl (old_stmt) : 0;

	      if (fold_stmt (&gsi))
		{
		  /* Re-read the statement from GSI as fold_stmt() may
		     have changed it.  */
		  gimple new_stmt = gsi_stmt (gsi);
		  update_stmt (new_stmt);

		  if (is_gimple_call (old_stmt)
		      || is_gimple_call (new_stmt))
		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						       new_stmt);

		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
		}
	    }
      }
}

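/* Folding after inlining pays off because parameter setup binds formal
   parameters to the actual arguments.  For example (a sketch): after
   inlining 'foo (4)' where foo's body tests 'if (n > 0)', the copied
   condition can become 'if (4 > 0)' once n's value is propagated, and
   fold_stmt reduces it to a constant so the dead arm is removed by the
   later CFG cleanup.  */
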
/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}

/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  tree prev_fn;
  basic_block bb;
  int last = n_basic_blocks;
  struct gimplify_ctx gctx;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return 0;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node (fn);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.dst_fn = current_function_decl;
      prev_fn = current_function_decl;
    }

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = pointer_set_create ();

  push_gimplify_context (&gctx);

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

#ifdef ENABLE_CHECKING
  {
    struct cgraph_edge *e;

    verify_cgraph_node (id.dst_node);

    /* Double check that we inlined everything we are supposed to inline.  */
    for (e = id.dst_node->callees; e; e = e->next_callee)
      gcc_assert (e->inline_failed);
  }
#endif

  /* Fold the statements before compacting/renumbering the basic blocks.  */
  fold_marked_statements (last, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);

  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
  verify_cgraph_node (id.dst_node);
#endif

  /* It would be nice to check SSA/CFG/statement consistency here, but it
     is not possible yet - the IPA passes might make various functions not
     throw, and they don't care to proactively update local EH info.  This
     is done later in the fixup_cfg pass, which also runs the
     verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = NULL_TREE, new_tree;

      chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
	mf_mark (new_tree);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
	  || code == TREE_LIST
	  || code == OMP_CLAUSE)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
	 we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
	mf_mark (new_tree);

      CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
					      CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);
  return NULL_TREE;
}

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  struct pointer_map_t *st = (struct pointer_map_t *) st_;
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = (tree *) pointer_map_contains (st, *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      *pointer_map_insert (st, *tp) = t;
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      *pointer_map_insert (st, t) = t;
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}

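/* SAVE_EXPRs must stay shared after copying: if the original tree uses
   SAVE_EXPR <x + 1> in two places, both uses have to end up pointing at
   the *same* copy, or the evaluate-once semantics would be lost.  The
   second pointer_map_insert above maps the copy to itself, so a walk that
   later reaches the copy leaves it alone.  Illustrative sketch (not part
   of the original source):

     tree save = build1 (SAVE_EXPR, integer_type_node, expr);
     tree sum  = build2 (PLUS_EXPR, integer_type_node, save, save);
     walk_tree (&sum, unsave_r, &id, NULL);

   after which both operands of the copied PLUS_EXPR still point to a
   single new SAVE_EXPR node.  */
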
/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
   copies the declaration and enters it in the decl map in DATA (which is
   really a `copy_body_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
	break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the
   decl map pointed to by ST (really the copy_body_data's pointer_map),
   remaps all local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  struct pointer_map_t *st = id->decl_map;
  tree *n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, *tp);

      /* If it's there, remap it.  */
      if (n)
	*tp = *n;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  copy_body_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);

  return expr;
}

/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the decl map in DATA (which
   is really a 'copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
			bool *handled_ops_p ATTRIBUTE_UNUSED,
			struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map pointed to by ST, remaps all local declarations
   to appropriate replacements in gimple operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  struct pointer_map_t *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, expr);

      /* If it's there, remap it.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map held in the walk info's copy_body_data, remaps all
   local declarations to appropriate replacements in gimple statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_BIND)
    {
      tree block = gimple_bind_block (stmt);

      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
						 NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  visited = pointer_set_create ();
  wi.info = &id;
  wi.pset = visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
  pointer_set_destroy (visited);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);

  return copy;
}

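/* A typical use of copy_gimple_seq_and_replace_locals (e.g. EH lowering
   duplicates finally clauses this way in this snapshot) is to emit the
   same cleanup sequence on several paths without the copies sharing
   local temporaries:

     gimple_seq dup = copy_gimple_seq_and_replace_locals (cleanup);

   Each call yields an independent copy whose local variables and labels
   have been replaced, so inserting both sequences is safe.  */
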
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}


/* Declare the variables created by the inliner.  Add all the variables
   in VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}

/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
   VAR_DECL translation.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
		     VAR_DECL, DECL_NAME (decl), type);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
	tree new_tree = remap_decl (arg, id);
	lang_hooks.dup_lang_specific_decl (new_tree);
	*parg = new_tree;
	parg = &TREE_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument was used as a
	   temporary variable later in the function, the uses will be
	   replaced by a local variable.  */
	tree var = copy_decl_to_var (arg, id);
	get_var_ann (var);
	add_referenced_var (var);
	insert_decl_map (id, arg, var);
	/* Declare this new variable.  */
	TREE_CHAIN (var) = *vars;
	*vars = var;
      }
  return new_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
	  && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}

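/* The "noclone" attribute is the user-visible way to fail this predicate.
   For example (a sketch):

     int f (int) __attribute__ ((noclone));

   keeps f from being versioned or cloned even when IPA passes would
   otherwise specialize it; copy_forbidden covers the remaining
   structural reasons a body cannot be duplicated.  */
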
/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones and
   remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	{
	  gimple_stmt_iterator bsi;

	  for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
	    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
	      {
		struct cgraph_edge *e;
		struct cgraph_node *node;

		if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
		  {
		    if (!e->inline_failed)
		      cgraph_remove_node_and_inline_clones (e->callee);
		    else
		      cgraph_remove_edge (e);
		  }
		if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
		    && id->dst_node->clones)
		  for (node = id->dst_node->clones; node != id->dst_node;)
		    {
		      if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
			{
			  if (!e->inline_failed)
			    cgraph_remove_node_and_inline_clones (e->callee);
			  else
			    cgraph_remove_edge (e);
			}

		      if (node->clones)
			node = node->clones;
		      else if (node->next_sibling_clone)
			node = node->next_sibling_clone;
		      else
			{
			  while (node != id->dst_node
				 && !node->next_sibling_clone)
			    node = node->clone_of;
			  if (node != id->dst_node)
			    node = node->next_sibling_clone;
			}
		    }
	      }
	  delete_basic_block (b);
	  changed = true;
	}
    }

  if (changed)
    tidy_fallthru_edges ();
#ifdef ENABLE_CHECKING
  verify_cgraph_node (id->dst_node);
  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
      && id->dst_node->clones)
    {
      struct cgraph_node *node;
      for (node = id->dst_node->clones; node != id->dst_node;)
	{
	  verify_cgraph_node (node);

	  if (node->clones)
	    node = node->clones;
	  else if (node->next_sibling_clone)
	    node = node->next_sibling_clone;
	  else
	    {
	      while (node != id->dst_node && !node->next_sibling_clone)
		node = node->clone_of;
	      if (node != id->dst_node)
		node = node->next_sibling_clone;
	    }
	}
    }
#endif
  return changed;
}

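/* The clone walk above visits every node in the clone tree rooted at
   id->dst_node exactly once, without a worklist: descend into 'clones'
   first, then advance along 'next_sibling_clone', and climb back via
   'clone_of' when a subtree is exhausted.  On a small clone tree
   (illustrative only):

	dst_node
	  +- c1
	  |    +- c1a
	  |    +- c1b
	  +- c2

   the walk visits c1, c1a, c1b, c2 and terminates when the climb returns
   to dst_node with no sibling left.  The same idiom recurs in
   update_clone_info below.  */
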
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
	{
	  unsigned int i;
	  for (i = 0;
	       i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = VEC_index (ipa_replace_map_p,
					node->clone.tree_map, i);
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.  */
void
tree_function_versioning (tree old_decl, tree new_decl,
			  VEC(ipa_replace_map_p,gc)* tree_map,
			  bool update_clones, bitmap args_to_skip)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree t_step;
  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = pointer_set_create ();

  id.decl_map = pointer_map_create ();
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
		   old_entry_block->count,
		   old_entry_block->frequency);
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
			 &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
      {
	gimple init;
	replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
	if (replace_info->replace_p)
	  {
	    tree op = replace_info->new_tree;

	    STRIP_NOPS (op);

	    if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
	      op = TREE_OPERAND (op, 0);

	    if (TREE_CODE (op) == ADDR_EXPR)
	      {
		op = TREE_OPERAND (op, 0);
		while (handled_component_p (op))
		  op = TREE_OPERAND (op, 0);
		if (TREE_CODE (op) == VAR_DECL)
		  add_referenced_var (op);
	      }
	    gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    init = setup_one_parameter (&id, replace_info->old_tree,
					replace_info->new_tree, id.src_fn,
					NULL,
					&vars);
	    if (init)
	      VEC_safe_push (gimple, heap, init_stmts, init);
	  }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.dst_fn);

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
	 t_step; t_step = TREE_CHAIN (t_step))
      {
	tree var = TREE_VALUE (t_step);
	if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	  cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
	else if (!can_be_nonlocal (var, &id))
	  cfun->local_decls =
	    tree_cons (NULL_TREE, remap_decl (var, &id),
		       cfun->local_decls);
      }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, old_entry_block->frequency,
	     ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  if (VEC_length (gimple, init_stmts))
    {
      basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
      while (VEC_length (gimple, init_stmts))
	insert_init_stmt (bb, VEC_pop (gimple, init_stmts));
    }
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);
  fold_cond_expr_cond ();
  delete_unreachable_blocks_update_callgraph (&id);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
	      || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}

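/* A sketch of a typical call (illustrative only): an IPA pass that
   creates a specialized clone first builds the new decl and a replace
   map binding a parameter to its known value, then calls

     tree_function_versioning (old_decl, new_decl, tree_map,
			       false, args_to_skip);

   where the fourth argument is UPDATE_CLONES and args_to_skip marks the
   parameters that became constants and can be dropped from the clone's
   signature.  */
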
/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      struct pointer_map_t *decl_map = pointer_map_create ();
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
	*pointer_map_insert (decl_map, param) = arg;

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_lang_insert_block = false;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_region = -1;

      t = copy_tree_body (&id);
      pointer_map_destroy (decl_map);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}

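/* For example (a sketch), given a "const" callee that the front end kept
   in GENERIC form:

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   a call 'sq (3)' appearing inside a GENERIC expression (say, an array
   bound computed by the front end) can be replaced by the remapped body
   '3 * 3', because the parameter map built above binds x to the actual
   argument.  */
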
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);

  TYPE_CANONICAL (type) = type;

  return type;
}

/* Return whether it is safe to inline a function, taking into account
   that caller and callee may use different target specific options or
   different optimization options.  */
bool
tree_can_inline_p (tree caller, tree callee)
{
#if 0
  /* This causes a regression in SPEC in that it prevents a cold function from
     inlining a hot function.  Perhaps this should only apply to functions
     that the user declares hot/cold/optimize explicitly.  */

  /* Don't inline a function with a higher optimization level than the
     caller, or with different space constraints (hot/cold functions).  */
  tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);

  if (caller_tree != callee_tree)
    {
      struct cl_optimization *caller_opt
	= TREE_OPTIMIZATION ((caller_tree)
			     ? caller_tree
			     : optimization_default_node);

      struct cl_optimization *callee_opt
	= TREE_OPTIMIZATION ((callee_tree)
			     ? callee_tree
			     : optimization_default_node);

      if ((caller_opt->optimize > callee_opt->optimize)
	  || (caller_opt->optimize_size != callee_opt->optimize_size))
	return false;
    }
#endif

  /* Allow the backend to decide if inlining is ok.  */
  return targetm.target_option.can_inline_p (caller, callee);
}