/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
#include "integrate.h"

/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "gimple.h"

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

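/* An illustrative sketch (not part of the original sources): when
   inlining

     int square (int x) { return x * x; }

   into "y = square (3);", the PARM_DECL x is remapped to a new local
   VAR_DECL initialized from the argument, and "return x * x;" becomes
   an assignment to a dedicated return-value variable from which y is
   then read at the call site.  */
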
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);

/* Insert a tree->tree mapping for ID.  Although the name suggests that
   the trees should be variables, this mapping is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing the RESULT_DECL by the variable
     during inlining: this saves us from the need to introduce a PHI node
     in the case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, and it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case we are inlining to the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
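          /* An illustrative sketch (not part of the original sources):
             when a body such as "int f (void) { int x; return x; }" is
             inlined into the middle of a caller, the copy of x's SSA
             name can no longer be a default definition, so the code
             below gives it an explicit "x_N = 0" in the entry block of
             the inlined region.  */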
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME
                  && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

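/* An illustrative sketch (not part of the original sources): a
   variably modified type such as the VLA type "int[n]" of a local
   array must be remapped, because its size expression refers to a
   local "n" that inlining replaces; a type like plain "int" is
   shared unchanged.  */
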
/* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
   is NULL or TYPE has not been remapped before.  */

static tree
remapped_type (tree type, copy_body_data *id)
{
  tree *node;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;
  else
    return NULL;
}

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  /* At the moment dwarf2out can handle only these types of nodes.  We
     can support more later.  */
  if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    return false;

  /* We must use the global type.  We call remapped_type instead of
     remap_type since we don't want to remap this type here if it
     hasn't been remapped before.  */
  if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
    return false;

  /* Without SSA we can't tell if a variable is used.  */
  if (!gimple_in_ssa_p (cfun))
    return false;

  /* Live variables must be copied so we can attach DECL_RTL.  */
  if (var_ann (decl))
    return false;

  return true;
}

static tree
remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;
      tree origin_var = DECL_ORIGIN (old_var);

      if (can_be_nonlocal (old_var, id))
        {
          if (TREE_CODE (old_var) == VAR_DECL
              && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
            cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                           cfun->local_decls);
          if (debug_info_level > DINFO_LEVEL_TERSE
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
        ;
      else if (!new_var)
        {
          if (debug_info_level > DINFO_LEVEL_TERSE
              && !DECL_IGNORED_P (old_var)
              && nonlocalized_list)
            VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
        }
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
                                        &BLOCK_NONLOCALIZED_VARS (new_block),
                                        id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
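          /* An illustrative sketch (not part of the original sources):
             if the caller passes "&x" for a pointer parameter "p", a
             use of "*p" in the inlined body would naively become "*&x";
             the folding below turns it back into plain "x".  */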
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, unless they are referenced only from types.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* If EXPR has a block defined, map it to the newly constructed block.
         When inlining we want EXPRs without a block to appear in the
         block of the function call.  */
      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst (NULL_TREE,
                         id->eh_region_offset
                         + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;
  bool skip_first = false;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        {
          copy = gimple_build_assign (id->retvar, retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
        }
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        case GIMPLE_OMP_CRITICAL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy
            = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  /* We have to handle EH region remapping of GIMPLE_RESX specially because
     the region number is not an operand.  */
  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
    {
      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
    }
  return copy;
}

/* Copy basic block, scaling profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
  basic_block copy_basic_block;
  tree decl;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of only minor importance for driving canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);

  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
      seq_gsi = copy_gsi;

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m; fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&seq_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, true, GSI_SAME_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
          id->regimplify = false;
        }

      gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);

      if (id->regimplify)
        gimple_regimplify_operands (stmt, &seq_gsi);

      /* If copy_basic_block has been empty at the start of this iteration,
         call gsi_start_bb again to get at the newly added statements.  */
      if (gsi_end_p (copy_gsi))
        copy_gsi = gsi_start_bb (copy_basic_block);
      else
        gsi_next (&copy_gsi);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, so we
         need to process all of them.  */
      do
        {
          stmt = gsi_stmt (copy_gsi);
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
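              /* An illustrative sketch (not part of the original
                 sources): if "void f (int n, ...)" contains the call
                 "g (n, __builtin_va_arg_pack ());" and f is inlined at
                 "f (1, 2, 3)", the pack expands so that the copied
                 statement becomes "g (1, 2, 3)".  */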
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              gimple_set_bb (stmt, NULL);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so that even
             statements made foldable indirectly are updated.  If this
             turns out to be expensive, copy_body can be told to watch
             for nontrivial changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_node *node;
              struct cgraph_edge *edge;
              int flags;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    cgraph_clone_edge (edge, id->dst_node, stmt,
                                       REG_BR_PROB_BASE, 1,
                                       edge->frequency, true);
                  break;

                case CB_CGE_MOVE_CLONES:
                  for (node = id->dst_node->next_clone;
                       node;
                       node = node->next_clone)
                    {
                      edge = cgraph_edge (node, orig_stmt);
                      if (edge)
                        cgraph_set_call_stmt (edge, stmt);
                    }
                  /* FALLTHRU */

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }

              flags = gimple_call_flags (stmt);

              if (flags & ECF_MAY_BE_ALLOCA)
                cfun->calls_alloca = true;
              if (flags & ECF_RETURNS_TWICE)
                cfun->calls_setjmp = true;
            }

          /* If you think we can abort here, you are wrong.
             There is no region 0 in gimple.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);

          if (stmt_could_throw_p (stmt)
              /* When we are cloning for inlining, we are supposed to
                 construct a clone that calls precisely the same functions
                 as the original.  However, IPA optimizers might've earlier
                 proved some function calls to be non-trapping, which might
                 render some basic blocks dead and thus unreachable.

                 We can't update SSA with unreachable blocks in the CFG, so
                 we prevent that scenario by preserving even the "dead" EH
                 edges until the point they are later removed by the
                 fixup_cfg pass.  */
              || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                  && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
            {
              int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);

              /* Add an entry for the copied tree in the EH hashtable.
                 When cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
                  && id->eh_region > 0
                  && stmt_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }

          if (gimple_in_ssa_p (cfun))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }
      while (!gsi_end_p (copy_gsi));

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}

110cfe1c
JH
1579/* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1580 form is quite easy, since dominator relationship for old basic blocks does
1581 not change.
1582
1583 There is however exception where inlining might change dominator relation
1584 across EH edges from basic block within inlined functions destinating
5305a4cb 1585 to landing pads in function we inline into.
110cfe1c 1586
e9705dc5
AO
1587 The function fills in PHI_RESULTs of such PHI nodes if they refer
1588 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1589 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1590 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1591 set, and this means that there will be no overlapping live ranges
110cfe1c
JH
1592 for the underlying symbol.
1593
1594 This might change in future if we allow redirecting of EH edges and
1595 we might want to change way build CFG pre-inlining to include
1596 all the possible edges then. */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert ((e->flags & EDGE_EH)
                        || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}
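
/* A sketch of the case handled above (illustrative assumption): a block
   copied from the callee may throw into a landing pad that already exists
   in the caller.  Any PHI node in that landing pad then needs an argument
   for the new EH edge E; for gimple regs the code reuses the argument
   already supplied along the corresponding edge RE from RET_BB, e.g.

     x_3 = PHI <x_1(bb_from_caller), x_1(bb_copied_from_callee)>

   where the second argument is simply copied from the first.  */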

/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
  basic_block new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  edge old_edge;
  gimple_stmt_iterator si;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
        edge new_edge;

        flags = old_edge->flags;

        /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
            && old_edge->dest->aux != EXIT_BLOCK_PTR)
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
        new_edge->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
    {
      gimple copy_stmt;
      bool can_throw, nonlocal_goto;

      copy_stmt = gsi_stmt (si);
      update_stmt (copy_stmt);
      if (gimple_in_ssa_p (cfun))
        mark_symbols_for_renaming (copy_stmt);

      /* Do this before the possible split_block.  */
      gsi_next (&si);

      /* If this tree could throw an exception, there are two
         cases where we need to add abnormal edge(s): the
         tree wasn't in a region and there is a "current
         region" in the caller; or the original tree had
         EH edges.  In both cases split the block after the tree,
         and add abnormal edge(s) as needed; we need both
         those from the callee and the caller.
         We check whether the copy can throw, because constant
         propagation can change an INDIRECT_REF which throws
         into a COMPONENT_REF which doesn't.  If the copy
         can throw, the original could also throw.  */
      can_throw = stmt_can_throw_internal (copy_stmt);
      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);

      if (can_throw || nonlocal_goto)
        {
          if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
              edge e = split_block (new_bb, copy_stmt);

              new_bb = e->dest;
              new_bb->aux = e->src->aux;
              si = gsi_start_bb (new_bb);
            }
        }

      if (can_throw)
        make_eh_edges (copy_stmt);

      if (nonlocal_goto)
        make_abnormal_goto_edges (gimple_bb (copy_stmt), true);

      if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
        update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
    }
}
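
/* A sketch of the splitting done above (illustrative): when a copied
   statement in the middle of a block can throw, e.g.

     tmp_1 = *p_2;        <-- may trap
     tmp_3 = tmp_1 + 1;

   the block is split right after the trapping statement, so that the EH
   edge created by make_eh_edges leaves a block that ends in the throwing
   statement, as the CFG representation requires.  */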

/* Copy the PHIs.  All blocks and edges are copied, some blocks
   were possibly split and new outgoing EH edges inserted.
   BB points to the block of the original function and AUX pointers
   link the original and newly copied blocks.  */

static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
  basic_block const new_bb = (basic_block) bb->aux;
  edge_iterator ei;
  gimple phi;
  gimple_stmt_iterator si;

  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
    {
      tree res, new_res;
      gimple new_phi;
      edge new_edge;

      phi = gsi_stmt (si);
      res = PHI_RESULT (phi);
      new_res = res;
      if (is_gimple_reg (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
              edge const old_edge
                = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;
              tree block = id->block;
              id->block = NULL_TREE;
              walk_tree (&new_arg, copy_tree_body_r, id, NULL);
              id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
              if (TREE_CODE (new_arg) != SSA_NAME
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
                  gimple_seq stmts = NULL;
                  new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
                  gsi_insert_seq_on_edge_immediate (new_edge, stmts);
                }
              add_phi_arg (new_phi, new_arg, new_edge);
            }
        }
    }
}
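
/* Illustrative note on the fix-up above: remapping a PHI argument can
   yield an expression such as (foo *)&this->m, which is not a valid
   GIMPLE operand.  The code gimplifies it with force_gimple_operand,
   inserts the resulting statements on the incoming edge, and only then
   uses the computed temporary as the PHI argument.  */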

/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, (copy_body_data *) data);
}

/* Build a struct function and associated data structures for the new
   clone NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */

static void
initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
                 int frequency)
{
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  gcov_type count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Get clean struct function.  */
  push_struct_function (new_fndecl);

  /* We will rebuild these, so just sanity check that they are empty.  */
  gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
  gcc_assert (cfun->local_decls == NULL);
  gcc_assert (cfun->cfg == NULL);
  gcc_assert (cfun->decl == new_fndecl);

  /* Copy items we preserve during cloning.  */
  cfun->static_chain_decl = src_cfun->static_chain_decl;
  cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
  cfun->function_end_locus = src_cfun->function_end_locus;
  cfun->curr_properties = src_cfun->curr_properties;
  cfun->last_verified = src_cfun->last_verified;
  if (src_cfun->ipa_transforms_to_apply)
    cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
                                              src_cfun->ipa_transforms_to_apply);
  cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
  cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
  cfun->function_frequency = src_cfun->function_frequency;
  cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
  cfun->stdarg = src_cfun->stdarg;
  cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
  cfun->after_inlining = src_cfun->after_inlining;
  cfun->returns_struct = src_cfun->returns_struct;
  cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
  cfun->after_tree_profile = src_cfun->after_tree_profile;

  init_empty_tree_cfg ();

  ENTRY_BLOCK_PTR->count =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  ENTRY_BLOCK_PTR->frequency =
    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->count =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
     REG_BR_PROB_BASE);
  EXIT_BLOCK_PTR->frequency =
    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
     frequency_scale / REG_BR_PROB_BASE);
  if (src_cfun->eh)
    init_eh_for_function ();

  if (src_cfun->gimple_df)
    {
      init_tree_ssa (cfun);
      cfun->gimple_df->in_ssa_p = true;
      init_ssa_operands ();
    }
  pop_cfun ();
}
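
/* Worked example of the scaling above (numbers chosen for illustration):
   with REG_BR_PROB_BASE == 10000, a callee whose entry block count is
   1000, and a clone expected to execute 250 times, we get
   count_scale = 10000 * 250 / 1000 = 2500, so each copied block receives
   new_count = old_count * 2500 / 10000, i.e. a quarter of the original
   count.  */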

/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
               basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  gcov_type count_scale, frequency_scale;
  int last;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
                   / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
                       / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
              (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      id->eh_region_offset
        = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
                                0, id->eh_region);
    }

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    {
      basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
      bb->aux = new_bb;
      new_bb->aux = bb;
    }

  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale, exit_block_map);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    {
      ((basic_block)bb->aux)->aux = NULL;
      bb->aux = NULL;
    }

  /* Zero out AUX fields of newly created blocks during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    BASIC_BLOCK (last)->aux = NULL;
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  return new_fndecl;
}

/* Wrapper around copy_cfg_body: copy the body of FN, which must have a
   CFG at this point.  */

static tree
copy_body (copy_body_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->src_fn;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}

/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && auto_var_in_fn_p (var, fn);
}
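
/* For instance (an illustrative case): when inlining a recursive call to
   f into f itself, an argument such as &local, where local is an
   automatic variable of f, must not be propagated into the inlined body,
   because that body has its own remapped instance of local.  This
   predicate is how setup_one_parameter detects the situation.  */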

static void
insert_init_stmt (basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
         rhs into a temporary.  gimple_regimplify_operands is not
         prepared to do this for us.  */
      if (!is_gimple_reg (gimple_assign_lhs (init_stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
          && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
        {
          tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
                             gimple_expr_type (init_stmt),
                             gimple_assign_rhs1 (init_stmt));
          rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
                                          GSI_NEW_STMT);
          gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
          gimple_assign_set_rhs1 (init_stmt, rhs);
        }
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);
      mark_symbols_for_renaming (init_stmt);
    }
}
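
/* A sketch of the case handled above (illustrative): an init statement

     mem_var = (some_type) arg_1;

   whose lhs does not live in a register cannot keep the conversion on its
   rhs, so the conversion is forced into a fresh temporary first:

     tmp_2 = (some_type) arg_1;
     mem_var = tmp_2;  */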

/* Initialize parameter P with VALUE.  If needed, produce an init
   statement at the end of BB.  When BB is NULL, we return the init
   statement to be output later.  */
static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
              ? gimple_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      if (fold_convertible_p (TREE_TYPE (p), value))
        rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
      else
        /* ??? For valid (GIMPLE) programs we should not end up here.
           Still if something has gone wrong and we end up with truly
           mismatched types here, fall back to using a VIEW_CONVERT_EXPR
           to not leak invalid GIMPLE to the following passes.  */
        rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
    }

  /* If the parameter is never assigned to and has no SSA_NAMEs created,
     we may not need to create a new variable here at all.  Instead, we
     may be able to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
         invalid sharing when the operand is not really constant.
         It is not a big deal to prohibit constant propagation here as
         we will constant-propagate in the DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
          && useless_type_conversion_p (TREE_TYPE (p),
                                        TREE_TYPE (value))
          /* We have to be very careful about ADDR_EXPR.  Make sure
             the base variable isn't a local variable of the inlined
             function, e.g., when doing recursive inlining, direct or
             mutually-recursive or whatever, which is why we don't
             just test whether fn == current_function_decl.  */
          && ! self_inlining_addr_expr (value, fn))
        {
          insert_decl_map (id, p, value);
          return NULL;
        }
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);
  if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that a TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to the function.

     We need to construct a map for the variable anyway as it might be
     used in different SSA names when the parameter is set in the
     function.

     Do the replacement at -O0 for const arguments replaced by a constant.
     This is important for builtin_constant_p and other constructs that
     require the constant argument to be visible in the inlined function
     body.

     FIXME: This usually kills the last connection between the inlined
     function parameter and the actual value in debug info.  Can we do
     better here?  If we just inserted the statement, copy propagation
     would kill it anyway as it always did in older versions of GCC.

     We might want to introduce a notion that a single SSA_NAME might
     represent multiple variables for purposes of debugging.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
          || (TREE_READONLY (p)
              && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
          || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return NULL;
    }

  /* If the value of the argument is never used, there is no need to
     initialize it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return NULL;
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
        {
          insert_decl_map (id, p, var);
          return NULL;
        }

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
      if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
          init_stmt = gimple_build_assign (def, rhs);
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (bb, init_stmt);
    }
  return init_stmt;
}
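
/* Worked example (illustrative, assuming optimization is enabled): for a
   call f (5) where f's parameter p is readonly and never written, the
   code above maps p (or its default SSA name) directly to the constant 5,
   so no initialization statement is emitted and uses of p in the inlined
   body fold to 5.  Only when such a shortcut is unsafe does it build
   "var = rhs" with gimple_build_assign and hand it to insert_init_stmt.  */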

/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
                               tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}

/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where the result should be
   stored.  It is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST,
   if non-null, was the LHS of the MODIFY_EXPR to which this call is
   the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
                         tree *use_p)
{
  tree callee = id->src_fn;
  tree caller = id->dst_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        {
          tree return_slot_addr = build_fold_addr_expr (return_slot);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

          /* We are going to construct *&return_slot and we can't do that
             for variables believed to be not addressable.

             FIXME: This check possibly can match, because values returned
             via return slot optimization are not believed to have their
             address taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          if (gimple_in_ssa_p (cfun))
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset;
              enum machine_mode mode;
              int unsignedp;
              int volatilep;
              tree base;
              base = get_inner_reference (return_slot, &bitsize, &bitpos,
                                          &offset,
                                          &mode, &unsignedp, &volatilep,
                                          false);
              if (TREE_CODE (base) == INDIRECT_REF)
                base = TREE_OPERAND (base, 0);
              if (TREE_CODE (base) == SSA_NAME)
                base = SSA_NAME_VAR (base);
              mark_sym_for_renaming (base);
            }
          var = return_slot_addr;
        }
      else
        {
          var = return_slot;
          gcc_assert (TREE_CODE (var) != SSA_NAME);
          TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
        }
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been
     handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  if (gimple_in_ssa_p (cfun))
    {
      get_var_ann (var);
      add_referenced_var (var);
    }

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->local_decls
    = tree_cons (NULL_TREE, var,
                 DECL_STRUCT_FUNCTION (caller)->local_decls);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    use = fold_convert (caller_type, var);

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
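
/* Illustrative example: for a call site

     d = foo ();

   where d is a non-addressable local and no type promotion is involved,
   the logic above reuses d itself in place of the callee's RESULT_DECL,
   avoiding a temporary.  If d might be modified by the callee through
   aliasing (it is global, addressable, etc.), a fresh VAR_DECL is created
   instead and d is assigned from it after the inlined body.  */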

/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}

static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle tree operands.  Returns
   NULL_TREE if a function can be inlined, otherwise sets the reason
   why not and returns a tree representing the offending operand.  */

static tree
inline_forbidden_p_op (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
                       void *fnp ATTRIBUTE_UNUSED)
{
  tree node = *nodep;
  tree t;

  if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
    {
      /* We cannot inline a function of the form

           void F (int i) { struct S { int ar[i]; } s; }

         Attempting to do so produces a catch-22.
         If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
         UNION_TYPE nodes, then it goes into infinite recursion on a
         structure containing a pointer to its own type.  If it doesn't,
         then the type node for S doesn't get adjusted properly when
         F is inlined.

         ??? This is likely no longer true, but it's too late in the 4.0
         cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
        if (variably_modified_type_p (TREE_TYPE (t), NULL))
          {
            inline_forbidden_reason
              = G_("function %q+F can never be inlined "
                   "because it uses variable sized variables");
            return node;
          }
    }

  return NULL_TREE;
}

/* A callback for walk_gimple_seq to handle statements.  Returns
   non-NULL iff a function cannot be inlined.  Also sets the reason
   why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                         struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline alloca calls unless the user explicitly forced
         it, as this may change the program's memory overhead drastically
         when the function using alloca is called in a loop.  In the GCC
         present in SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  */
      if (gimple_alloca_call_p (stmt)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          *handled_ops_p = true;
          return fn;
        }

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          *handled_ops_p = true;
          return t;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
          /* We cannot inline functions that take a variable number of
             arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined
               into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            *handled_ops_p = true;
            return t;

          default:
            break;
          }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          *handled_ops_p = true;
          return t;
        }
      break;

    case GIMPLE_LABEL:
      t = gimple_label_label (stmt);
      if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
             because we cannot remap the destination label used in the
             function that is performing the non-local goto.  */
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it receives a non-local goto");
          *handled_ops_p = true;
          return t;
        }
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}

/* A callback for walk_tree to verify that the initializer of a static
   local variable does not save the address of a label local to FNP.  */

static tree
inline_forbidden_p_2 (tree *nodep, int *walk_subtrees,
                      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    {
      inline_forbidden_reason
        = G_("function %q+F can never be inlined "
             "because it saves address of local label in a static variable");
      return node;
    }

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  tree step;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  bool forbidden_p = false;

  visited_nodes = pointer_set_create ();
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt,
                             inline_forbidden_p_op, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
        goto egress;
    }

  for (step = fun->local_decls; step; step = TREE_CHAIN (step))
    {
      tree decl = TREE_VALUE (step);
      if (TREE_CODE (decl) == VAR_DECL
          && TREE_STATIC (decl)
          && !DECL_EXTERNAL (decl)
          && DECL_INITIAL (decl))
        {
          tree ret;
          ret = walk_tree_without_duplicates (&DECL_INITIAL (decl),
                                              inline_forbidden_p_2, fndecl);
          forbidden_p = (ret != NULL);
          if (forbidden_p)
            goto egress;
        }
    }

egress:
  pointer_set_destroy (visited_nodes);
  input_location = saved_loc;
  return forbidden_p;
}
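
/* Example of a construct rejected above (illustrative):

     void f (void) { lab:; static void *p = &&lab; }

   saves the address of a local label in a static variable's initializer;
   the label's address is not constant across inlined instantiations, so
   such a function can never be inlined.  */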

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  /* Don't auto-inline anything that might not be bound within
     this unit of translation.  */
  else if (!DECL_DECLARED_INLINE_P (fn)
           && DECL_REPLACEABLE_P (fn))
    inlinable = false;

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.  */
      if (always_inline)
        sorry (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
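
/* Worked example (target-dependent numbers, chosen for illustration): on
   a target with MOVE_MAX_PIECES == 8, moving a 20-byte struct costs
   (20 + 8 - 1) / 8 = 3 units, while a type too large to be moved by
   pieces is charged a flat 4 units for the implied memcpy call (three
   arguments plus the call itself).  */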

/* Returns the cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      return weights->div_mod_cost;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}


/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

static
int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}


/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
         deal with:
         1) Simple assignments to registers;
         2) Stores to things that must live in memory.  This includes
            "normal" stores to scalars, but also assignments of large
            structures, or constructors of big arrays;

         Let us look at these two cases, assuming we have "a = b + C":
         <GIMPLE_ASSIGN <var_decl "a">
                        <plus_expr <var_decl "b"> <constant C>>
         If "a" is a GIMPLE register, the assignment to it is free on almost
         any target, because "a" usually ends up in a real register.  Hence
         the only cost of this expression comes from the PLUS_EXPR, and we
         can ignore the GIMPLE_ASSIGN.
         If "a" is not a GIMPLE register, the assignment to "a" will most
         likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
         of moving something into "a", which we compute using the function
         estimate_move_cost.  */
      lhs = gimple_assign_lhs (stmt);
      if (is_gimple_reg (lhs))
        cost = 0;
      else
        cost = estimate_move_cost (TREE_TYPE (lhs));

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights);
      break;

    case GIMPLE_SWITCH:
      /* Take into account the cost of the switch + guess 2 conditional
         jumps for each case label.

         TODO: once the switch expansion logic is sufficiently separated,
         we can do a better job of estimating the cost of the switch.  */
      cost = gimple_switch_num_labels (stmt) * 2;
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        tree addr = gimple_call_fn (stmt);
        tree funtype = TREE_TYPE (addr);

        if (POINTER_TYPE_P (funtype))
          funtype = TREE_TYPE (funtype);

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
          cost = weights->target_builtin_call_cost;
        else
          cost = weights->call_cost;

        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
              return 0;
            case BUILT_IN_EXPECT:
              cost = 0;
              break;

            /* Prefetch instruction is not expensive.  */
            case BUILT_IN_PREFETCH:
              cost = weights->target_builtin_call_cost;
              break;

            default:
              break;
            }

        if (decl)
          funtype = TREE_TYPE (decl);

        /* Our cost must be kept in sync with
           cgraph_estimate_size_after_inlining that does use function
           declaration to figure out the arguments.  */
        if (decl && DECL_ARGUMENTS (decl))
          {
            tree arg;
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
              cost += estimate_move_cost (TREE_TYPE (arg));
          }
        else if (funtype && prototype_p (funtype))
          {
            tree t;
            for (t = TYPE_ARG_TYPES (funtype); t; t = TREE_CHAIN (t))
              cost += estimate_move_cost (TREE_VALUE (t));
          }
        else
          {
            for (i = 0; i < gimple_call_num_args (stmt); i++)
              {
                tree arg = gimple_call_arg (stmt, i);
                cost += estimate_move_cost (TREE_TYPE (arg));
              }
          }

        break;
      }

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_RETURN:
    case GIMPLE_CHANGE_DYNAMIC_TYPE:
    case GIMPLE_PREDICT:
      return 0;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
      return 1;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OpenMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
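
/* Worked example (illustrative, assuming a word-sized scalar so that
   estimate_move_cost returns 1): for "a = b / c" where a is not a GIMPLE
   register, the estimate is 1 + weights->div_mod_cost, i.e. 1 + 1 = 2
   with the size weights initialized below and 1 + 10 = 11 with the time
   weights.  */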

/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}


/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
  eni_inlining_weights.target_builtin_call_cost = 1;
  eni_inlining_weights.div_mod_cost = 10;
  eni_inlining_weights.omp_cost = 40;

  eni_size_weights.call_cost = 1;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.target_builtin_call_cost = 10;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
}

/* Estimate the number of instructions in a gimple_seq.  */

int
count_insns_seq (gimple_seq seq, eni_weights *weights)
{
  gimple_stmt_iterator gsi;
  int n = 0;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n += estimate_num_insns (gsi_stmt (gsi), weights);

  return n;
}


/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}

/* Fetch the callee declaration from the call graph edge going from NODE
   and associated with the call statement STMT.  Return NULL_TREE if not
   found.  */
static tree
get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  if (cs)
    return cs->callee->decl;

  return NULL_TREE;
}

/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */

static bool
expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
{
  tree retvar, use_retvar;
  tree fn;
  struct pointer_map_t *st;
  tree return_slot;
  tree modify_dest;
  location_t saved_location;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = FALSE;
  bool purge_dead_abnormal_edges;
  tree t_step;
  tree var;

  /* Set input_location here so we get the right instantiation context
     if we call instantiate_decl from inlinable_function_p.  */
  saved_location = input_location;
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  if (gimple_code (stmt) != GIMPLE_CALL)
    goto egress;

  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  fn = gimple_call_fndecl (stmt);
  if (!fn)
    {
      fn = get_indirect_callee_fndecl (id->dst_node, stmt);
      if (!fn)
        goto egress;
    }

  /* Turn forward declarations into real ones.  */
  fn = cgraph_node (fn)->decl;

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, which are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple_body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Objective-C and Fortran still call tree_rest_of_compilation
     directly.  Kill this check once that is fixed.  */
  if (!id->dst_node->analyzed)
    goto egress;

  cg_edge = cgraph_edge (id->dst_node, stmt);

  /* Constant propagation on arguments done during previous inlining
     may create new direct calls.  Produce an edge for them.  */
  if (!cg_edge)
    {
      struct cgraph_node *dest = cgraph_node (fn);

      /* We have a missing edge in the callgraph.  This can happen
         when previous inlining turned an indirect call into a direct
         call by constant propagating arguments.  In all other cases
         we hit a bug (incorrect node sharing is the most common
         reason for missing edges).  */
      gcc_assert (dest->needed);
      cgraph_create_edge (id->dst_node, dest, stmt,
                          bb->count, CGRAPH_FREQ_BASE,
                          bb->loop_depth)->inline_failed
        = CIF_ORIGINALLY_INDIRECT_CALL;
      if (dump_file)
        {
          fprintf (dump_file, "Created new direct edge to %s",
                   cgraph_node_name (dest));
        }
      goto egress;
    }

  /* Don't try to inline functions that are not well-suited to
     inlining.  */
  if (!cgraph_inline_p (cg_edge, &reason))
    {
      /* If this call was originally indirect, we do not want to emit any
         inlining related warnings or sorry messages because there are no
         guarantees regarding those.  */
      if (cg_edge->indirect_call)
        goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* Avoid warnings during early inline pass.  */
          && cgraph_global_info_ready)
        {
          sorry ("inlining failed in call to %q+F: %s", fn,
                 cgraph_inline_failed_string (reason));
          sorry ("called from here");
        }
      else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
               && !DECL_IN_SYSTEM_HEADER (fn)
               && reason != CIF_UNSPECIFIED
               && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
               /* Avoid warnings during early inline pass.  */
               && cgraph_global_info_ready)
        {
          warning (OPT_Winline, "inlining failed in call to %q+F: %s",
61a05df1 3266 fn, cgraph_inline_failed_string (reason));
3176a0c2 3267 warning (OPT_Winline, "called from here");
a833faa5 3268 }
6de9cd9a 3269 goto egress;
a833faa5 3270 }
ea99e0be 3271 fn = cg_edge->callee->decl;
d4e4baa9 3272
18c6ada9 3273#ifdef ENABLE_CHECKING
1b369fae 3274 if (cg_edge->callee->decl != id->dst_node->decl)
e21aff8a 3275 verify_cgraph_node (cg_edge->callee);
18c6ada9
JH
3276#endif
3277
e21aff8a 3278 /* We will be inlining this callee. */
e21aff8a
SB
3279 id->eh_region = lookup_stmt_eh_region (stmt);
3280
726a989a 3281 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
3282 e = split_block (bb, stmt);
3283 bb = e->src;
3284 return_block = e->dest;
3285 remove_edge (e);
3286
4f6c2131
EB
3287 /* split_block splits after the statement; work around this by
3288 moving the call into the second block manually. Not pretty,
3289 but seems easier than doing the CFG manipulation by hand
726a989a
RB
3290 when the GIMPLE_CALL is in the last statement of BB. */
3291 stmt_gsi = gsi_last_bb (bb);
3292 gsi_remove (&stmt_gsi, false);
4f6c2131 3293
726a989a 3294 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4f6c2131
EB
3295 been the source of abnormal edges. In this case, schedule
3296 the removal of dead abnormal edges. */
726a989a
RB
3297 gsi = gsi_start_bb (return_block);
3298 if (gsi_end_p (gsi))
e21aff8a 3299 {
726a989a 3300 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 3301 purge_dead_abnormal_edges = true;
e21aff8a 3302 }
4f6c2131
EB
3303 else
3304 {
726a989a 3305 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
3306 purge_dead_abnormal_edges = false;
3307 }
3308
726a989a 3309 stmt_gsi = gsi_start_bb (return_block);
742a37d5 3310
d436bff8
AH
3311 /* Build a block containing code to initialize the arguments, the
3312 actual inline expansion of the body, and a label for the return
3313 statements within the function to jump to. The type of the
3314 statement expression is the return type of the function call. */
e21aff8a
SB
3315 id->block = make_node (BLOCK);
3316 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3e2844cb 3317 BLOCK_SOURCE_LOCATION (id->block) = input_location;
4a283090 3318 prepend_lexical_block (gimple_block (stmt), id->block);
e21aff8a 3319
d4e4baa9
AO
3320 /* Local declarations will be replaced by their equivalents in this
3321 map. */
3322 st = id->decl_map;
6be42dd4 3323 id->decl_map = pointer_map_create ();
d4e4baa9 3324
e21aff8a 3325 /* Record the function we are about to inline. */
1b369fae
RH
3326 id->src_fn = fn;
3327 id->src_node = cg_edge->callee;
110cfe1c 3328 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
726a989a 3329 id->gimple_call = stmt;
1b369fae 3330
3c8da8a5
AO
3331 gcc_assert (!id->src_cfun->after_inlining);
3332
045685a9 3333 id->entry_bb = bb;
7299cb99
JH
3334 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3335 {
3336 gimple_stmt_iterator si = gsi_last_bb (bb);
3337 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3338 NOT_TAKEN),
3339 GSI_NEW_STMT);
3340 }
726a989a 3341 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 3342
ea99e0be 3343 if (DECL_INITIAL (fn))
4a283090 3344 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
acb8f212 3345
d4e4baa9
AO
3346 /* Return statements in the function body will be replaced by jumps
3347 to the RET_LABEL. */
1e128c5f
GB
3348 gcc_assert (DECL_INITIAL (fn));
3349 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 3350
726a989a 3351 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 3352 return_slot = NULL;
726a989a 3353 if (gimple_call_lhs (stmt))
81bafd36 3354 {
726a989a 3355 modify_dest = gimple_call_lhs (stmt);
81bafd36
ILT
3356
3357 /* The function which we are inlining might not return a value,
3358 in which case we should issue a warning that the function
3359 does not return a value. In that case the optimizers will
3360 see that the variable to which the value is assigned was not
3361 initialized. We do not want to issue a warning about that
3362 uninitialized variable. */
3363 if (DECL_P (modify_dest))
3364 TREE_NO_WARNING (modify_dest) = 1;
726a989a
RB
3365
3366 if (gimple_call_return_slot_opt_p (stmt))
fa47911c 3367 {
110cfe1c 3368 return_slot = modify_dest;
fa47911c
JM
3369 modify_dest = NULL;
3370 }
81bafd36 3371 }
7740f00d
RH
3372 else
3373 modify_dest = NULL;
3374
1ea193c2
ILT
3375 /* If we are inlining a call to the C++ operator new, we don't want
3376 to use type based alias analysis on the return value. Otherwise
3377 we may get confused if the compiler sees that the inlined new
3378 function returns a pointer which was just deleted. See bug
3379 33407. */
3380 if (DECL_IS_OPERATOR_NEW (fn))
3381 {
3382 return_slot = NULL;
3383 modify_dest = NULL;
3384 }
3385
d4e4baa9 3386 /* Declare the return variable for the function. */
726a989a 3387 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
1ea193c2
ILT
3388
3389 if (DECL_IS_OPERATOR_NEW (fn))
3390 {
3391 gcc_assert (TREE_CODE (retvar) == VAR_DECL
3392 && POINTER_TYPE_P (TREE_TYPE (retvar)));
3393 DECL_NO_TBAA_P (retvar) = 1;
3394 }
d4e4baa9 3395
acb8f212 3396 /* Add local vars in this inlined callee to caller. */
cb91fab0 3397 t_step = id->src_cfun->local_decls;
acb8f212
JH
3398 for (; t_step; t_step = TREE_CHAIN (t_step))
3399 {
3400 var = TREE_VALUE (t_step);
3401 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
eb50f5f4 3402 {
65401a0b 3403 if (var_ann (var) && add_referenced_var (var))
eb50f5f4
JH
3404 cfun->local_decls = tree_cons (NULL_TREE, var,
3405 cfun->local_decls);
3406 }
526d73ab
JH
3407 else if (!can_be_nonlocal (var, id))
3408 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3409 cfun->local_decls);
acb8f212
JH
3410 }
3411
eb50f5f4
JH
3412 /* This is it. Duplicate the callee body. Assume callee is
3413 pre-gimplified. Note that we must not alter the caller
3414 function in any way before this point, as this CALL_EXPR may be
3415 a self-referential call; if we're calling ourselves, we need to
3416 duplicate our body before altering anything. */
3417 copy_body (id, bb->count, bb->frequency, bb, return_block);
3418
d4e4baa9 3419 /* Clean up. */
6be42dd4 3420 pointer_map_destroy (id->decl_map);
d4e4baa9
AO
3421 id->decl_map = st;
3422
5006671f
RG
3423 /* Unlink the calls virtual operands before replacing it. */
3424 unlink_stmt_vdef (stmt);
3425
84936f6f 3426 /* If the inlined function returns a result that we care about,
726a989a
RB
3427 substitute the GIMPLE_CALL with an assignment of the return
3428 variable to the LHS of the call. That is, if STMT was
3429 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3430 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 3431 {
726a989a
RB
3432 gimple old_stmt = stmt;
3433 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3434 gsi_replace (&stmt_gsi, stmt, false);
110cfe1c 3435 if (gimple_in_ssa_p (cfun))
5006671f 3436 mark_symbols_for_renaming (stmt);
726a989a 3437 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e21aff8a 3438 }
6de9cd9a 3439 else
110cfe1c 3440 {
726a989a
RB
3441 /* Handle the case of inlining a function with no return
3442 statement, which causes the return value to become undefined. */
3443 if (gimple_call_lhs (stmt)
3444 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 3445 {
726a989a
RB
3446 tree name = gimple_call_lhs (stmt);
3447 tree var = SSA_NAME_VAR (name);
110cfe1c
JH
3448 tree def = gimple_default_def (cfun, var);
3449
110cfe1c
JH
3450 if (def)
3451 {
726a989a
RB
3452 /* If the variable is used undefined, make this name
3453 undefined via a move. */
3454 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3455 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c 3456 }
110cfe1c
JH
3457 else
3458 {
726a989a
RB
3459 /* Otherwise make this variable undefined. */
3460 gsi_remove (&stmt_gsi, true);
110cfe1c 3461 set_default_def (var, name);
726a989a 3462 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
110cfe1c
JH
3463 }
3464 }
3465 else
726a989a 3466 gsi_remove (&stmt_gsi, true);
110cfe1c 3467 }
d4e4baa9 3468
4f6c2131 3469 if (purge_dead_abnormal_edges)
726a989a 3470 gimple_purge_dead_abnormal_call_edges (return_block);
84936f6f 3471
e21aff8a
SB
3472 /* If the value of the new expression is ignored, that's OK. We
3473 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3474 the equivalent inlined version either. */
726a989a
RB
3475 if (is_gimple_assign (stmt))
3476 {
3477 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 3478 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
726a989a
RB
3479 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3480 }
84936f6f 3481
1eb3331e
DB
3482 /* Output the inlining info for this abstract function, since it has been
3483 inlined. If we don't do this now, we can lose the information about the
3484 variables in the function when the blocks get blown away as soon as we
3485 remove the cgraph node. */
e21aff8a 3486 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 3487
e72fcfe8 3488 /* Update callgraph if needed. */
e21aff8a 3489 cgraph_remove_node (cg_edge->callee);
e72fcfe8 3490
e21aff8a 3491 id->block = NULL_TREE;
e21aff8a 3492 successfully_inlined = TRUE;
742a37d5 3493
6de9cd9a
DN
3494 egress:
3495 input_location = saved_location;
e21aff8a 3496 return successfully_inlined;
d4e4baa9 3497}
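
/* Illustration (editor's sketch, not part of the original source): for a
   caller statement

     a = foo (x);

   with a callee like 'int foo (int n) { return n + 1; }', the block
   holding the call is split, the callee body is copied in between, and
   the GIMPLE_CALL is finally replaced by an assignment from the return
   variable, roughly:

     n.0 = x;
     retval.1 = n.0 + 1;
     a = retval.1;

   The temporary names n.0 and retval.1 are invented here purely for
   illustration.  */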

/* Expand call statements reachable from BB.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_call (stmt)
          && expand_call_inline (bb, stmt, id))
        return true;
    }

  return false;
}

/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, struct pointer_set_t *statements)
{
  for (; first < n_basic_blocks; first++)
    if (BASIC_BLOCK (first))
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (BASIC_BLOCK (first));
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          if (pointer_set_contains (statements, gsi_stmt (gsi)))
            {
              gimple old_stmt = gsi_stmt (gsi);

              if (fold_stmt (&gsi))
                {
                  /* Re-read the statement from GSI as fold_stmt() may
                     have changed it.  */
                  gimple new_stmt = gsi_stmt (gsi);
                  update_stmt (new_stmt);

                  if (is_gimple_call (old_stmt))
                    cgraph_update_edges_for_call_stmt (old_stmt, new_stmt);

                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
                    gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                }
            }
      }
}

/* Return true if BB has at least one abnormal outgoing edge.  */

static inline bool
has_abnormal_outgoing_edge_p (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->flags & EDGE_ABNORMAL)
      return true;

  return false;
}

/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  tree prev_fn;
  basic_block bb;
  int last = n_basic_blocks;
  struct gimplify_ctx gctx;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return 0;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node (fn);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.dst_fn = current_function_decl;
      prev_fn = current_function_decl;
    }

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = pointer_set_create ();

  push_gimplify_context (&gctx);

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

#ifdef ENABLE_CHECKING
  {
    struct cgraph_edge *e;

    verify_cgraph_node (id.dst_node);

    /* Double check that we inlined everything we are supposed to inline.  */
    for (e = id.dst_node->callees; e; e = e->next_callee)
      gcc_assert (e->inline_failed);
  }
#endif

  /* Fold the statements before compacting/renumbering the basic blocks.  */
  fold_marked_statements (last, id.statements_to_fold);
  pointer_set_destroy (id.statements_to_fold);

  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  fold_cond_expr_cond ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet: the IPA passes might make various functions not throw,
     and they don't care to proactively update local EH info.  This is done
     later in the fixup_cfg pass, which also executes the verification.  */
  return (TODO_update_ssa
          | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
}
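
/* Usage note (editor's sketch): the inlining pass entry points hand the
   TODO flags returned here straight back to the pass manager, roughly

     return optimize_inline_calls (current_function_decl);

   from within an execute callback; the real call sites live in the
   inlining passes, not in this file.  */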

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = NULL_TREE, new_tree;

      chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
          || code == TREE_LIST
          || code == OMP_CLAUSE)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
        mf_mark (new_tree);

      CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
                                              CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);
  return NULL_TREE;
}
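
/* Usage note (editor's sketch): copy_tree_r is designed to be passed to
   walk_tree to deep-copy an expression in place; assuming EXPR is a tree
   the caller owns, a minimal use is

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   after which EXPR points at freshly copied nodes.  */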

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  struct pointer_map_t *st = (struct pointer_map_t *) st_;
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = (tree *) pointer_map_contains (st, *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      *pointer_map_insert (st, *tp) = t;
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      *pointer_map_insert (st, t) = t;
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
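
/* Editor's note on the two insertions above: recording both *TP -> T and
   T -> T means that if the walk later reaches the copy T itself, the
   lookup succeeds and T is left alone, so an already-remapped SAVE_EXPR
   is never copied a second time.  */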

/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
   copies the declaration and enters it in the decl map in DATA (which is
   really a `copy_body_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
{
  copy_body_data *id = (copy_body_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
        break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}

/* Called via walk_tree when an expression is unsaved.  Using the
   pointer_map in ID->decl_map, remaps all local declarations to
   appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  struct pointer_map_t *st = id->decl_map;
  tree *n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, *tp);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
    }

  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  copy_body_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);

  return expr;
}
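
/* Usage sketch (editor's, assuming EXPR is shared with another tree and
   must become independently editable):

     expr = unsave_expr_now (expr);

   afterwards EXPR carries private copies of its local variables, labels
   and SAVE_EXPRs.  */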

/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the decl map in DATA
   (which is really a 'copy_body_data *').  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
                        bool *handled_ops_p ATTRIBUTE_UNUSED,
                        struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the pointer_map in ID->decl_map, remaps all local declarations
   to appropriate replacements in gimple operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  struct pointer_map_t *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = (tree *) pointer_map_contains (st, expr);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
           || TREE_CODE (expr) == BIND_EXPR
           || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
        {
          TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
          TREE_OPERAND (expr, 3) = NULL_TREE;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the pointer_map in ID->decl_map, remaps all local declarations
   to appropriate replacements in gimple statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
                     bool *handled_ops_p ATTRIBUTE_UNUSED,
                     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple stmt = gsi_stmt (*gsip);

  if (gimple_code (stmt) == GIMPLE_BIND)
    {
      tree block = gimple_bind_block (stmt);

      if (block)
        {
          remap_block (&block, id);
          gimple_bind_set_block (stmt, block);
        }

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      if (gimple_bind_vars (stmt))
        gimple_bind_set_vars (stmt,
                              remap_decls (gimple_bind_vars (stmt), NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  struct pointer_set_t *visited;
  gimple_seq copy;

  /* There's nothing to do for an empty sequence.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = pointer_map_create ();

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  visited = pointer_set_create ();
  wi.info = &id;
  wi.pset = visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
  pointer_set_destroy (visited);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);

  return copy;
}
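
/* Usage sketch (editor's): a client that wants to emit the same
   statement sequence twice can duplicate it first, e.g.

     gimple_seq new_seq = copy_gimple_seq_and_replace_locals (seq);

   NEW_SEQ is a deep copy in which every block-local variable and label
   has been replaced by a fresh copy, so the two sequences can then be
   modified independently.  */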

/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}

/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = TREE_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Finish up COPY, a duplicate of DECL (both must be DECLs).  The DECL
   originally was in the FROM_FN (ID->src_fn), but it will now be in
   the TO_FN (ID->dst_fn).  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}

static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
  DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
      DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT (copy) = 0;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree.  */
static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data *id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &TREE_CHAIN (new_tree);
      }
    else if (!pointer_map_contains (id->decl_map, arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used as a
           temporary variable later in the function, the uses will be
           replaced by a local variable.  */
        tree var = copy_decl_to_var (arg, id);
        get_var_ann (var);
        add_referenced_var (var);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        TREE_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}

/* Return a copy of the function's static chain.  */
static tree
copy_static_chain (tree static_chain, copy_body_data *id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */
bool
tree_versionable_function_p (tree fndecl)
{
  if (fndecl == NULL_TREE)
    return false;
  /* ??? There are cases where a function is
     uninlinable but can be versioned.  */
  if (!tree_inlinable_function_p (fndecl))
    return false;

  return true;
}

/* Create a new name for an omp child function or clone.  Returns an
   identifier.  */

static GTY(()) unsigned int clone_fn_id_num;

static tree
clone_function_name (tree decl)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len, "_clone");
  /* Prefer a '.' or '$' separator over the '_' written above when the
     target's label syntax allows it.  */
#ifndef NO_DOT_IN_LABEL
  prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
  prefix[len] = '$';
#endif
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}
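
/* Illustration (editor's note, target-dependent): for a function whose
   assembler name is "foo", the buffer is first "foo_clone", the '_' is
   then overwritten with '.' (or '$') where the target allows it, and
   ASM_FORMAT_PRIVATE_NAME appends a counter, so the generated names come
   out roughly as "foo.clone.0", "foo.clone.1", and so on.  */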

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees.  If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.  */
void
tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
                          bool update_clones, bitmap args_to_skip)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block;
  VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);

  tree t_step;
  tree old_current_function_decl = current_function_decl;
  tree vars = NULL_TREE;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node (old_decl);
  new_version_node = cgraph_node (new_decl);

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  if (!update_clones)
    {
      DECL_NAME (new_decl) = clone_function_name (old_decl);
      SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
      SET_DECL_RTL (new_decl, NULL_RTX);
      id.statements_to_fold = pointer_set_create ();
    }

  id.decl_map = pointer_map_create ();
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_lang_insert_block = NULL;

  current_function_decl = new_decl;
  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
    (DECL_STRUCT_FUNCTION (old_decl));
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count,
                   old_entry_block->frequency);
  push_cfun (DECL_STRUCT_FUNCTION (new_decl));

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
      copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
                         &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
      {
        gimple init;
        replace_info
          = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i);
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;

            STRIP_NOPS (op);

            if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
              op = TREE_OPERAND (op, 0);

            if (TREE_CODE (op) == ADDR_EXPR)
              {
                op = TREE_OPERAND (op, 0);
                while (handled_component_p (op))
                  op = TREE_OPERAND (op, 0);
                if (TREE_CODE (op) == VAR_DECL)
                  add_referenced_var (op);
              }
            gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            init = setup_one_parameter (&id, replace_info->old_tree,
                                        replace_info->new_tree, id.src_fn,
                                        NULL,
                                        &vars);
            if (init)
              VEC_safe_push (gimple, heap, init_stmts, init);
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl) =
      copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                     args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (id.dst_fn);

  declare_inline_vars (DECL_INITIAL (new_decl), vars);
  if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
    /* Add local vars.  */
    for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
         t_step; t_step = TREE_CHAIN (t_step))
      {
        tree var = TREE_VALUE (t_step);
        if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
          cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
        else if (!can_be_nonlocal (var, &id))
          cfun->local_decls =
            tree_cons (NULL_TREE, remap_decl (var, &id),
                       cfun->local_decls);
      }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, old_entry_block->frequency,
             ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);

  if (DECL_RESULT (old_decl) != NULL_TREE)
    {
      tree *res_decl = &DECL_RESULT (old_decl);
      DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
    }

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  if (VEC_length (gimple, init_stmts))
    {
      basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
      while (VEC_length (gimple, init_stmts))
        insert_init_stmt (bb, VEC_pop (gimple, init_stmts));
    }

  /* Clean up.  */
  pointer_map_destroy (id.decl_map);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  if (!update_clones)
    {
      fold_marked_statements (0, id.statements_to_fold);
      pointer_set_destroy (id.statements_to_fold);
      fold_cond_expr_cond ();
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
    }
  VEC_free (gimple, heap, init_stmts);
  pop_cfun ();
  current_function_decl = old_current_function_decl;
  gcc_assert (!current_function_decl
              || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
  return;
}
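
/* Usage sketch (editor's): the cgraph versioning code creates NEW_DECL
   as a copy of the FUNCTION_DECL first and then calls this function;
   with no replacements and no skipped arguments that amounts to

     tree_function_versioning (old_decl, new_decl, NULL, false, NULL);

   after which NEW_DECL owns its own CFG, body, arguments and local
   declarations.  */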

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = pointer_map_create ();
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  pointer_map_destroy (id.decl_map);

  TYPE_CANONICAL (type) = type;

  return type;
}
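
/* Usage sketch (editor's, assuming TYPE must not be shared between two
   copies of a function):

     tree copy = build_duplicate_type (type);

   COPY duplicates TYPE and its fields and is made its own canonical
   type.  */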

/* Return whether it is safe to inline a function because it used different
   target specific options or different optimization options.  */
bool
tree_can_inline_p (tree caller, tree callee)
{
#if 0
  /* This causes a regression in SPEC in that it prevents a cold function from
     inlining a hot function.  Perhaps this should only apply to functions
     that the user declares hot/cold/optimize explicitly.  */

  /* Don't inline a function with a higher optimization level than the
     caller, or with different space constraints (hot/cold functions).  */
  tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);

  if (caller_tree != callee_tree)
    {
      struct cl_optimization *caller_opt
        = TREE_OPTIMIZATION ((caller_tree)
                             ? caller_tree
                             : optimization_default_node);

      struct cl_optimization *callee_opt
        = TREE_OPTIMIZATION ((callee_tree)
                             ? callee_tree
                             : optimization_default_node);

      if ((caller_opt->optimize > callee_opt->optimize)
          || (caller_opt->optimize_size != callee_opt->optimize_size))
        return false;
    }
#endif

  /* Allow the backend to decide if inlining is ok.  */
  return targetm.target_option.can_inline_p (caller, callee);
}

#include "gt-tree-inline.h"