/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

22#include "config.h"
23#include "system.h"
4977bab6
ZW
24#include "coretypes.h"
25#include "tm.h"
69dcadff 26#include "toplev.h"
588d3ade
AO
27#include "tree.h"
28#include "tree-inline.h"
d4e4baa9
AO
29#include "rtl.h"
30#include "expr.h"
31#include "flags.h"
32#include "params.h"
33#include "input.h"
34#include "insn-config.h"
d4e4baa9
AO
35#include "varray.h"
36#include "hashtab.h"
d23c55c2 37#include "langhooks.h"
e21aff8a
SB
38#include "basic-block.h"
39#include "tree-iterator.h"
1c4a429a 40#include "cgraph.h"
ddd2d57e 41#include "intl.h"
6de9cd9a 42#include "tree-mudflap.h"
089efaa4 43#include "tree-flow.h"
18c6ada9 44#include "function.h"
e21aff8a
SB
45#include "ggc.h"
46#include "tree-flow.h"
6de9cd9a 47#include "diagnostic.h"
e21aff8a 48#include "except.h"
1eb3331e 49#include "debug.h"
e21aff8a 50#include "pointer-set.h"
19734dd8 51#include "ipa-prop.h"
6946b3f7 52#include "value-prof.h"
110cfe1c 53#include "tree-pass.h"
18177c7e
RG
54#include "target.h"
55#include "integrate.h"
d4e4baa9 56
6de9cd9a
DN
57/* I'm not real happy about this, but we need to handle gimple and
58 non-gimple trees. */
726a989a 59#include "gimple.h"
588d3ade 60
/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, and the result is a new
   function, rather than being inserted into the blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined), those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

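/* For instance, when inlining a call "y = f (x)" whose body ends in
   "return a + b;", the inliner materializes roughly (a sketch; a' and
   b' stand for the remapped copies of f's locals):

       retval.1 = a' + b';   <-- the RETURN_EXPR became a MODIFY_EXPR
       y = retval.1;         <-- caller reads the returned-value variable
*/
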
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses for heuristics in inlining.  */

eni_weights eni_inlining_weights;

/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
static bool inlinable_function_p (tree);
static void remap_block (tree *, copy_body_data *);
static tree remap_decls (tree, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);
static void add_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple remap_gimple_stmt (gimple, copy_body_data *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  *pointer_map_insert (id->decl_map, key) = value;

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    *pointer_map_insert (id->decl_map, value) = value;
}

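/* A minimal usage sketch (KEY and COPY are hypothetical trees): after

       insert_decl_map (id, key, copy);

   ID->DECL_MAP contains both key->copy and copy->copy, so a later
   remap that encounters COPY itself finds the identity entry and does
   not duplicate it a second time.  */
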
/* Construct a new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = (tree *) pointer_map_contains (id->decl_map, name);
  if (n)
    return unshare_expr (*n);

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (SSA_NAME_VAR (name), id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining:  this saves us from the need to introduce a PHI node in the
     case the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
    {
      new_tree = make_ssa_name (new_tree, NULL);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining a function having an uninitialized variable, we
             might extend its lifetime (the variable might get reused).
             This causes an ICE in the case we end up extending the
             lifetime of an SSA name across an abnormal edge, but it also
             increases register pressure.

             We simply initialize all uninitialized vars by 0 except
             for the case we are inlining to the very first BB.  We can
             avoid this for all BBs that are not inside strongly connected
             regions of the CFG, but this is expensive to test.  */
          if (id->entry_bb
              && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
              gimple init_stmt;

              init_stmt = gimple_build_assign (new_tree,
                                               fold_convert (TREE_TYPE (new_tree),
                                                             integer_zero_node));
              gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
            }
          else
            {
              SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
              if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
                  == name)
                set_default_def (SSA_NAME_VAR (new_tree), new_tree);
            }
        }
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

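/* Example of the zero-initialization above (a sketch): if the inlined
   body uses an uninitialized local u_3 whose defining statement is a
   GIMPLE_NOP, and we are not inlining into the very first BB, the
   remapped name gets an explicit "u' = 0" in ID->ENTRY_BB rather than
   remaining a default definition whose lifetime could now extend
   across an abnormal edge.  */
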
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->src_fn;

  /* See if we have remapped this declaration.  */

  n = (tree *) pointer_map_contains (id->decl_map, decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
         we can reuse this copy.  Do this early because remap_type may
         need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
        return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
        DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
        {
          walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
            walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }

      if (cfun && gimple_in_ssa_p (cfun)
          && (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
        {
          tree def = gimple_default_def (id->src_cfun, decl);
          get_var_ann (t);
          if (TREE_CODE (decl) != PARM_DECL && def)
            {
              tree map = remap_ssa_name (def, id);
              /* Watch out for RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME
                  && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
        }
      return t;
    }

  return unshare_expr (*n);
}

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
                                              TYPE_MODE (type),
                                              TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
                                                TYPE_MODE (type),
                                                TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      t = TYPE_MIN_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new_tree);
      if (t && TREE_CODE (t) != INTEGER_CST)
        walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case FUNCTION_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f, nf = NULL;

        for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
          {
            t = remap_decl (f, id);
            DECL_CONTEXT (t) = new_tree;
            TREE_CHAIN (t) = nf;
            nf = t;
          }
        TYPE_FIELDS (new_tree) = nreverse (nf);
      }
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = (tree *) pointer_map_contains (id->decl_map, type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

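/* E.g. remapping a variably modified type such as "int[n]" (the only
   kind for which remap_type_1 is reached) creates a fresh ARRAY_TYPE
   whose TYPE_DOMAIN bound is rewritten to refer to the copy of "n",
   and the new type is spliced into the variant list of its remapped
   main variant.  */
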
static tree
remap_decls (tree decls, copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* We cannot chain the local static declarations into the new
         function's local_decls, as we can't duplicate them or we would
         break the one-decl rule.  Go ahead and link them into the
         current function's local_decls instead.  */

      if (!auto_var_in_fn_p (old_var, id->src_fn)
          && !DECL_EXTERNAL (old_var))
        {
          cfun->local_decls = tree_cons (NULL_TREE, old_var,
                                         cfun->local_decls);
          continue;
        }

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
        ;
      else
        {
          gcc_assert (DECL_P (new_var));
          TREE_CHAIN (new_var) = new_decls;
          new_decls = new_var;
        }
    }

  return nreverse (new_decls);
}

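/* E.g. a "static int counter;" declared inside the source function is
   not auto_var_in_fn_p, so it is not duplicated: the single existing
   decl is simply linked into the destination CFUN->LOCAL_DECLS and the
   loop moves on.  */
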
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->dst_fn;

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    add_lexical_block (new_tree, remap_blocks (t, id));
  return new_tree;
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}

/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_stmt (&new_body, new_stmt);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

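/* Sketch of the effect: copying a High GIMPLE bind such as

       { int t;  t = a + b;  c = t; }

   remaps T to a fresh local T' and yields a new GIMPLE_BIND
   "{ int t';  t' = a' + b';  c' = t'; }" whose BLOCK has likewise
   been remapped (primes denote the copies).  */
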
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to tell walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
         variables.  We don't want to copy static variables; there's
         only one of those, no matter how many times we inline the
         containing function.  Similarly for globals from an outer
         function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (!DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
         will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
      if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree type, new_tree, old;

              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the
                 INDIRECT_REF, but we absolutely rely on that.  As
                 fold_indirect_ref does other useful transformations,
                 try that first, though.  */
              type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (!*tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, but not when they are referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* We should never have TREE_BLOCK set on non-statements.  */
      if (EXPR_P (*tp))
        gcc_assert (!TREE_BLOCK (*tp));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          /* The copied TARGET_EXPR has never been expanded, even if the
             original node was expanded already.  */
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
             the INDIRECT_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
          tree block = id->block;
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
          id->block = block;

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

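/* E.g. if the formal "int *p" was mapped to "&x'" because the inlined
   call site passed "&x", a use "*p" in the body first becomes "*&x'",
   and the INDIRECT_REF handling above folds it back to plain "x'".  */
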
/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
             MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
      else /* Else the RETURN_EXPR returns no value.  */
        {
          *tp = NULL;
          return (tree) (void *)1;
        }
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
           && (! DECL_CONTEXT (*tp)
               || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
        *walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
        *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
                                  TREE_INT_CST_HIGH (*tp));
      else
        {
          *tp = copy_node (*tp);
          TREE_TYPE (*tp) = new_type;
        }
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
          && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
          && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt ();
                  return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
      else if (TREE_CODE (*tp) == INDIRECT_REF)
        {
          /* Get rid of *& from inline substitutions that can happen when a
             pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              tree new_tree;
              tree old;
              /* If we happen to get an ADDR_EXPR in n->value, strip
                 it manually here as we'll eventually get ADDR_EXPRs
                 which lie about their types pointed to.  In this case
                 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
                 but we absolutely rely on that.  As fold_indirect_ref
                 does other useful transformations, try that first, though.  */
              tree type = TREE_TYPE (TREE_TYPE (*n));
              new_tree = unshare_expr (*n);
              old = *tp;
              *tp = gimple_fold_indirect_ref (new_tree);
              if (! *tp)
                {
                  if (TREE_CODE (new_tree) == ADDR_EXPR)
                    {
                      *tp = fold_indirect_ref_1 (type, new_tree);
                      /* ??? We should either assert here or build
                         a VIEW_CONVERT_EXPR instead of blindly leaking
                         incompatible types to our IL.  */
                      if (! *tp)
                        *tp = TREE_OPERAND (new_tree, 0);
                    }
                  else
                    {
                      *tp = build1 (INDIRECT_REF, type, new_tree);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
              return NULL;
            }
        }

      /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Global variables we haven't seen yet need to go into referenced
         vars, but not when they are referenced from types only.  */
      if (gimple_in_ssa_p (cfun)
          && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);

      /* If EXPR has block defined, map it to the newly constructed block.
         When inlining we want EXPRs without block to appear in the block
         of the function call.  */
      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
            {
              tree *n;
              n = (tree *) pointer_map_contains (id->decl_map,
                                                 TREE_BLOCK (*tp));
              gcc_assert (n);
              new_block = *n;
            }
          TREE_BLOCK (*tp) = new_block;
        }

      if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
          build_int_cst (NULL_TREE,
                         id->eh_region_offset
                         + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
         original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
        {
          TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
          TREE_OPERAND (*tp, 3) = NULL_TREE;
        }

      /* Variable substitution need not be simple.  In particular, the
         INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
         and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
          walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);

          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;

          *walk_subtrees = 0;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (stmt);

      /* If we're returning something, just turn that into an
         assignment into the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
         just toss the entire GIMPLE_RETURN.  */
      if (retval && TREE_CODE (retval) != RESULT_DECL)
        copy = gimple_build_assign (id->retvar, retval);
      else
        return gimple_build_nop ();
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
         in High GIMPLE form.  Handle here all the High GIMPLE statements that
         have embedded statements.  */
      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          copy = copy_gimple_bind (stmt, id);
          break;

        case GIMPLE_CATCH:
          s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
          copy = gimple_build_catch (gimple_catch_types (stmt), s1);
          break;

        case GIMPLE_EH_FILTER:
          s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
          copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
          break;

        case GIMPLE_TRY:
          s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
          s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
          copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
          copy = gimple_build_wce (s1);
          break;

        case GIMPLE_OMP_PARALLEL:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_parallel
                   (s1,
                    gimple_omp_parallel_clauses (stmt),
                    gimple_omp_parallel_child_fn (stmt),
                    gimple_omp_parallel_data_arg (stmt));
          break;

        case GIMPLE_OMP_TASK:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_task
                   (s1,
                    gimple_omp_task_clauses (stmt),
                    gimple_omp_task_child_fn (stmt),
                    gimple_omp_task_data_arg (stmt),
                    gimple_omp_task_copy_fn (stmt),
                    gimple_omp_task_arg_size (stmt),
                    gimple_omp_task_arg_align (stmt));
          break;

        case GIMPLE_OMP_FOR:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
          copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
                                       gimple_omp_for_collapse (stmt), s2);
          {
            size_t i;
            for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
              {
                gimple_omp_for_set_index (copy, i,
                                          gimple_omp_for_index (stmt, i));
                gimple_omp_for_set_initial (copy, i,
                                            gimple_omp_for_initial (stmt, i));
                gimple_omp_for_set_final (copy, i,
                                          gimple_omp_for_final (stmt, i));
                gimple_omp_for_set_incr (copy, i,
                                         gimple_omp_for_incr (stmt, i));
                gimple_omp_for_set_cond (copy, i,
                                         gimple_omp_for_cond (stmt, i));
              }
          }
          break;

        case GIMPLE_OMP_MASTER:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_master (s1);
          break;

        case GIMPLE_OMP_ORDERED:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_ordered (s1);
          break;

        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
          break;

        case GIMPLE_OMP_SECTIONS:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_sections
                   (s1, gimple_omp_sections_clauses (stmt));
          break;

        case GIMPLE_OMP_SINGLE:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_single
                   (s1, gimple_omp_single_clauses (stmt));
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
          && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
          && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
        {
          /* Here we handle statements that are not completely rewritten.
             First we detect some inlining-induced bogosities for
             discarding.  */

          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
          tree decl = gimple_assign_lhs (stmt), value;
          tree *n;

          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
              value = *n;
              STRIP_TYPE_NOPS (value);
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                return gimple_build_nop ();
            }
        }

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  When inlining we want statements without a block to
     appear in the block of the function call.  */
  new_block = id->block;
  if (gimple_block (copy))
    {
      tree *n;
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
      gcc_assert (n);
      new_block = *n;
    }

  gimple_set_block (copy, new_block);

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* We have to handle EH region remapping of GIMPLE_RESX specially because
     the region number is not an operand.  */
  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
    {
      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
    }
  return copy;
}

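/* E.g. if constant propagation during inlining mapped the variable in a
   degenerate self-assignment "x = x" to a constant, the check above
   sees TREE_CONSTANT on the mapped value and replaces the whole
   statement with a GIMPLE_NOP instead of keeping a bogus "0 = 0".  */
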
/* Copy basic block, scale profile accordingly.  Edges will be taken
   care of later.  */

static basic_block
copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
         gcov_type count_scale)
{
  gimple_stmt_iterator gsi, copy_gsi;
  basic_block copy_basic_block;
  tree decl;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0,
                                         (basic_block) bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;

  /* We are going to rebuild frequencies from scratch.  These values
     are of just small importance for driving canonicalize_loop_headers.  */
  copy_basic_block->frequency = ((gcov_type)bb->frequency
                                 * frequency_scale / REG_BR_PROB_BASE);

  if (copy_basic_block->frequency > BB_FREQ_MAX)
    copy_basic_block->frequency = BB_FREQ_MAX;

  copy_gsi = gsi_start_bb (copy_basic_block);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      gimple orig_stmt = stmt;

      id->regimplify = false;
      stmt = remap_gimple_stmt (stmt, id);
      if (gimple_nop_p (stmt))
        continue;

      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);

      /* With return slot optimization we can end up with
         non-gimple (foo *)&this->m, fix that here.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == NOP_EXPR
          && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          tree new_rhs;
          new_rhs = force_gimple_operand_gsi (&copy_gsi,
                                              gimple_assign_rhs1 (stmt),
                                              true, NULL, true, GSI_SAME_STMT);
          gimple_assign_set_rhs1 (stmt, new_rhs);
        }
      else if (id->regimplify)
        gimple_regimplify_operands (stmt, &copy_gsi);

      gsi_insert_after (&copy_gsi, stmt, GSI_NEW_STMT);

      /* Process the new statement.  The call to gimple_regimplify_operands
         possibly turned the statement into multiple statements, we
         need to process all of them.  */
      while (!gsi_end_p (copy_gsi))
        {
          if (is_gimple_call (stmt)
              && gimple_call_va_arg_pack_p (stmt)
              && id->gimple_call)
            {
              /* __builtin_va_arg_pack () should be replaced by
                 all arguments corresponding to ... in the caller.  */
              tree p;
              gimple new_call;
              VEC(tree, heap) *argarray;
              size_t nargs = gimple_call_num_args (id->gimple_call);
              size_t n;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              /* Create the new array of arguments.  */
              n = nargs + gimple_call_num_args (stmt);
              argarray = VEC_alloc (tree, heap, n);
              VEC_safe_grow (tree, heap, argarray, n);

              /* Copy all the arguments before '...'  */
              memcpy (VEC_address (tree, argarray),
                      gimple_call_arg_ptr (stmt, 0),
                      gimple_call_num_args (stmt) * sizeof (tree));

              /* Append the arguments passed in '...'  */
              memcpy (VEC_address (tree, argarray) + gimple_call_num_args (stmt),
                      gimple_call_arg_ptr (id->gimple_call, 0)
                        + (gimple_call_num_args (id->gimple_call) - nargs),
                      nargs * sizeof (tree));

              new_call = gimple_build_call_vec (gimple_call_fn (stmt),
                                                argarray);

              VEC_free (tree, heap, argarray);

              /* Copy all GIMPLE_CALL flags, location and block, except
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, stmt);
              gimple_call_set_va_arg_pack (new_call, false);
              gimple_set_location (new_call, gimple_location (stmt));
              gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));

              gsi_replace (&copy_gsi, new_call, false);
              stmt = new_call;
            }
          else if (is_gimple_call (stmt)
                   && id->gimple_call
                   && (decl = gimple_call_fndecl (stmt))
                   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
                   && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
              size_t nargs = gimple_call_num_args (id->gimple_call);
              tree count, p;
              gimple new_stmt;

              for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
                nargs--;

              count = build_int_cst (integer_type_node, nargs);
              new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
              gsi_replace (&copy_gsi, new_stmt, false);
              stmt = new_stmt;
            }

          /* Statements produced by inlining can be unfolded, especially
             when we constant propagated some operands.  We can't fold
             them right now for two reasons:
             1) folding requires SSA_NAME_DEF_STMTs to be correct
             2) we can't change function calls to builtins.
             So we just mark the statement for later folding.  We mark
             all new statements, instead of just the statements that have
             changed by some nontrivial substitution, so even statements
             made foldable indirectly are updated.  If this turns out to be
             expensive, copy_body can be told to watch for nontrivial
             changes.  */
          if (id->statements_to_fold)
            pointer_set_insert (id->statements_to_fold, stmt);

          /* We're duplicating a CALL_EXPR.  Find any corresponding
             callgraph edges and update or duplicate them.  */
          if (is_gimple_call (stmt))
            {
              struct cgraph_node *node;
              struct cgraph_edge *edge;

              switch (id->transform_call_graph_edges)
                {
                case CB_CGE_DUPLICATE:
                  edge = cgraph_edge (id->src_node, orig_stmt);
                  if (edge)
                    cgraph_clone_edge (edge, id->dst_node, stmt,
                                       REG_BR_PROB_BASE, 1,
                                       edge->frequency, true);
                  break;

                case CB_CGE_MOVE_CLONES:
                  for (node = id->dst_node->next_clone;
                       node;
                       node = node->next_clone)
                    {
                      edge = cgraph_edge (node, orig_stmt);
                      if (edge)
                        cgraph_set_call_stmt (edge, stmt);
                    }
                  /* FALLTHRU */

                case CB_CGE_MOVE:
                  edge = cgraph_edge (id->dst_node, orig_stmt);
                  if (edge)
                    cgraph_set_call_stmt (edge, stmt);
                  break;

                default:
                  gcc_unreachable ();
                }
            }

          /* If you think we can abort here, you are wrong.
             There is no region 0 in gimple.  */
          gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);

          if (stmt_could_throw_p (stmt)
              /* When we are cloning for inlining, we are supposed to
                 construct a clone that calls precisely the same functions
                 as the original.  However, IPA optimizers might've earlier
                 proved some function calls to be non-trapping, which might
                 render some basic blocks dead and hence unreachable.

                 We can't update SSA with unreachable blocks in CFG and thus
                 we prevent the scenario by preserving even the "dead" eh
                 edges until the point they are later removed by
                 fixup_cfg pass.  */
              || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                  && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
            {
              int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);

              /* Add an entry for the copied tree in the EH hashtable.
                 When cloning or versioning, use the hashtable in
                 cfun, and just copy the EH number.  When inlining, use the
                 hashtable in the caller, and adjust the region number.  */
              if (region > 0)
                add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

              /* If this tree doesn't have a region associated with it,
                 and there is a "current region,"
                 then associate this tree with the current region
                 and add edges associated with this region.  */
              if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
                  && id->eh_region > 0
                  && stmt_could_throw_p (stmt))
                add_stmt_to_eh_region (stmt, id->eh_region);
            }

          if (gimple_in_ssa_p (cfun))
            {
              ssa_op_iter i;
              tree def;

              find_new_referenced_vars (gsi_stmt (copy_gsi));
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                if (TREE_CODE (def) == SSA_NAME)
                  SSA_NAME_DEF_STMT (def) = stmt;
            }

          gsi_next (&copy_gsi);
        }

      copy_gsi = gsi_last_bb (copy_basic_block);
    }

  return copy_basic_block;
}

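/* A numeric sketch of the scaling above: if the call site's profile
   count is half the callee's entry count, COUNT_SCALE is roughly
   REG_BR_PROB_BASE / 2, so "bb->count * count_scale / REG_BR_PROB_BASE"
   gives each copied block about half of its original execution count.  */
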
/* Inserting a Single Entry Multiple Exit region in SSA form into code in
   SSA form is quite easy, since the dominator relationship for the old
   basic blocks does not change.

   There is however an exception: inlining might change the dominator
   relation across EH edges from basic blocks within the inlined function
   to landing pads in the function we inline into.

   The function fills in PHI_RESULTs of such PHI nodes if they refer
   to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
   PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
   EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
   set, and this means that there will be no overlapping live ranges
   for the underlying symbol.

   This might change in the future if we allow redirecting of EH edges;
   we might then want to change the way the CFG is built pre-inlining to
   include all the possible edges.  */
static void
update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
                                  bool can_throw, bool nonlocal_goto)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
      {
        gimple phi;
        gimple_stmt_iterator si;

        gcc_assert (e->flags & EDGE_ABNORMAL);

        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);

        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));

        for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;

            phi = gsi_stmt (si);

            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);

            gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));

            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
                mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }

            re = find_edge (ret_bb, e->dest);
            gcc_assert (re);
            gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
                        == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));

            SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
                     USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
          }
      }
}

726a989a 1530
128a79fb
KH
1531/* Copy edges from BB into its copy constructed earlier, scale profile
1532 accordingly. Edges will be taken care of later. Assume aux
1533 pointers to point to the copies of each BB. */
726a989a 1534
e21aff8a 1535static void
0178d644 1536copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
e21aff8a 1537{
cceb1885 1538 basic_block new_bb = (basic_block) bb->aux;
e21aff8a
SB
1539 edge_iterator ei;
1540 edge old_edge;
726a989a 1541 gimple_stmt_iterator si;
e21aff8a
SB
1542 int flags;
1543
1544 /* Use the indices from the original blocks to create edges for the
1545 new ones. */
1546 FOR_EACH_EDGE (old_edge, ei, bb->succs)
e0704a46
JH
1547 if (!(old_edge->flags & EDGE_EH))
1548 {
82d6e6fc 1549 edge new_edge;
e21aff8a 1550
e0704a46 1551 flags = old_edge->flags;
e21aff8a 1552
e0704a46
JH
1553 /* Return edges do get a FALLTHRU flag when the get inlined. */
1554 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1555 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1556 flags |= EDGE_FALLTHRU;
82d6e6fc
KG
1557 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1558 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1559 new_edge->probability = old_edge->probability;
e0704a46 1560 }
e21aff8a
SB
1561
1562 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1563 return;
1564
726a989a 1565 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
e21aff8a 1566 {
726a989a 1567 gimple copy_stmt;
e9705dc5 1568 bool can_throw, nonlocal_goto;
e21aff8a 1569
726a989a 1570 copy_stmt = gsi_stmt (si);
e21aff8a 1571 update_stmt (copy_stmt);
110cfe1c
JH
1572 if (gimple_in_ssa_p (cfun))
1573 mark_symbols_for_renaming (copy_stmt);
726a989a 1574
e21aff8a 1575 /* Do this before the possible split_block. */
726a989a 1576 gsi_next (&si);
e21aff8a
SB
1577
1578 /* If this tree could throw an exception, there are two
1579 cases where we need to add abnormal edge(s): the
1580 tree wasn't in a region and there is a "current
1581 region" in the caller; or the original tree had
1582 EH edges. In both cases split the block after the tree,
1583 and add abnormal edge(s) as needed; we need both
1584 those from the callee and the caller.
1585 We check whether the copy can throw, because constant
1586 propagation can change an INDIRECT_REF which throws
1587 into a COMPONENT_REF which doesn't. If the copy
1588 can throw, the original could also throw. */
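 /* A hypothetical illustration of the constant-propagation point
 (an example, not from the surrounding sources): if the callee contains

     tmp = *p;    <-- INDIRECT_REF, may throw on a bad pointer

 and inlining substitutes p = &s.f into the copy, it becomes

     tmp = s.f;   <-- COMPONENT_REF, which cannot throw,

 so "the copy can throw" implies "the original could throw", but not
 the other way around. */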
726a989a
RB
1589 can_throw = stmt_can_throw_internal (copy_stmt);
1590 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
e9705dc5
AO
1591
1592 if (can_throw || nonlocal_goto)
e21aff8a 1593 {
726a989a 1594 if (!gsi_end_p (si))
e21aff8a
SB
1595 /* Note that bb's predecessor edges aren't necessarily
1596 right at this point; split_block doesn't care. */
1597 {
1598 edge e = split_block (new_bb, copy_stmt);
110cfe1c 1599
e21aff8a 1600 new_bb = e->dest;
110cfe1c 1601 new_bb->aux = e->src->aux;
726a989a 1602 si = gsi_start_bb (new_bb);
e21aff8a 1603 }
e9705dc5 1604 }
e21aff8a 1605
e9705dc5
AO
1606 if (can_throw)
1607 make_eh_edges (copy_stmt);
110cfe1c 1608
e9705dc5 1609 if (nonlocal_goto)
726a989a 1610 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
e9705dc5
AO
1611
1612 if ((can_throw || nonlocal_goto)
1613 && gimple_in_ssa_p (cfun))
726a989a 1614 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
e9705dc5 1615 can_throw, nonlocal_goto);
110cfe1c
JH
1616 }
1617}
1618
1619/* Copy the PHIs. All blocks and edges have been copied, some blocks
1620 may have been split and new outgoing EH edges inserted.
1621 BB points to the block of the original function and AUX pointers link
1622 the original and newly copied blocks. */
1623
1624static void
1625copy_phis_for_bb (basic_block bb, copy_body_data *id)
1626{
3d9a9f94 1627 basic_block const new_bb = (basic_block) bb->aux;
110cfe1c 1628 edge_iterator ei;
726a989a
RB
1629 gimple phi;
1630 gimple_stmt_iterator si;
110cfe1c 1631
726a989a 1632 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
110cfe1c 1633 {
726a989a
RB
1634 tree res, new_res;
1635 gimple new_phi;
110cfe1c
JH
1636 edge new_edge;
1637
726a989a
RB
1638 phi = gsi_stmt (si);
1639 res = PHI_RESULT (phi);
1640 new_res = res;
110cfe1c
JH
1641 if (is_gimple_reg (res))
1642 {
726a989a 1643 walk_tree (&new_res, copy_tree_body_r, id, NULL);
110cfe1c
JH
1644 SSA_NAME_DEF_STMT (new_res)
1645 = new_phi = create_phi_node (new_res, new_bb);
1646 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1647 {
726a989a
RB
1648 edge const old_edge
1649 = find_edge ((basic_block) new_edge->src->aux, bb);
110cfe1c
JH
1650 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1651 tree new_arg = arg;
726a989a
RB
1652 tree block = id->block;
1653 id->block = NULL_TREE;
1654 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1655 id->block = block;
110cfe1c 1656 gcc_assert (new_arg);
36b6e793
JJ
1657 /* With the return slot optimization we can end up with a
1658 non-gimple operand such as (foo *)&this->m; fix that here. */
1659 if (TREE_CODE (new_arg) != SSA_NAME
1660 && TREE_CODE (new_arg) != FUNCTION_DECL
1661 && !is_gimple_val (new_arg))
1662 {
726a989a
RB
1663 gimple_seq stmts = NULL;
1664 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1665 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
36b6e793 1666 }
110cfe1c
JH
1667 add_phi_arg (new_phi, new_arg, new_edge);
1668 }
e21aff8a
SB
1669 }
1670 }
1671}
1672
726a989a 1673
e21aff8a 1674/* Wrapper for remap_decl so it can be used as a callback. */
726a989a 1675
e21aff8a
SB
1676static tree
1677remap_decl_1 (tree decl, void *data)
1678{
1b369fae 1679 return remap_decl (decl, (copy_body_data *) data);
e21aff8a
SB
1680}
1681
110cfe1c
JH
1682/* Build a struct function and associated data structures for the new clone
1683 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1684
1685static void
1686initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1687 int frequency)
1688{
1689 struct function *new_cfun
1690 = (struct function *) ggc_alloc_cleared (sizeof (struct function));
1691 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
0178d644 1692 gcov_type count_scale, frequency_scale;
110cfe1c
JH
1693
1694 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1695 count_scale = (REG_BR_PROB_BASE * count
1696 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1697 else
1698 count_scale = 1;
1699
1700 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1701 frequency_scale = (REG_BR_PROB_BASE * frequency
1702 /
1703 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1704 else
1705 frequency_scale = count_scale;
1706
1707 /* Register specific tree functions. */
726a989a 1708 gimple_register_cfg_hooks ();
110cfe1c 1709 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
3e87758a 1710 new_cfun->funcdef_no = get_next_funcdef_no ();
110cfe1c 1711 VALUE_HISTOGRAMS (new_cfun) = NULL;
cb91fab0 1712 new_cfun->local_decls = NULL;
110cfe1c
JH
1713 new_cfun->cfg = NULL;
1714 new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
110cfe1c
JH
1715 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
1716 push_cfun (new_cfun);
1717 init_empty_tree_cfg ();
1718
1719 ENTRY_BLOCK_PTR->count =
1720 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1721 REG_BR_PROB_BASE);
1722 ENTRY_BLOCK_PTR->frequency =
1723 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1724 frequency_scale / REG_BR_PROB_BASE);
1725 EXIT_BLOCK_PTR->count =
1726 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1727 REG_BR_PROB_BASE);
1728 EXIT_BLOCK_PTR->frequency =
1729 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1730 frequency_scale / REG_BR_PROB_BASE);
1731 if (src_cfun->eh)
1732 init_eh_for_function ();
1733
1734 if (src_cfun->gimple_df)
1735 {
5db9ba0c 1736 init_tree_ssa (cfun);
110cfe1c
JH
1737 cfun->gimple_df->in_ssa_p = true;
1738 init_ssa_operands ();
1739 }
1740 pop_cfun ();
1741}
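
/* A worked illustration of the scaling above, with hypothetical numbers
 and the usual REG_BR_PROB_BASE of 10000: if the source function's entry
 block has count 1000 and the clone is built for a context with count 250,
 then

     count_scale = 10000 * 250 / 1000 = 2500

 and each copied block gets count * 2500 / 10000, i.e. one quarter of
 the original count, in fixed-point arithmetic. */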
1742
e21aff8a
SB
1743/* Make a copy of the body of FN so that it can be inserted inline in
1744 another function. Walks FN via CFG, returns new fndecl. */
1745
1746static tree
1b369fae 1747copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
e21aff8a
SB
1748 basic_block entry_block_map, basic_block exit_block_map)
1749{
1b369fae 1750 tree callee_fndecl = id->src_fn;
e21aff8a 1751 /* Original cfun for the callee, doesn't change. */
1b369fae 1752 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
110cfe1c 1753 struct function *cfun_to_copy;
e21aff8a
SB
1754 basic_block bb;
1755 tree new_fndecl = NULL;
0178d644 1756 gcov_type count_scale, frequency_scale;
110cfe1c 1757 int last;
e21aff8a 1758
1b369fae 1759 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
e21aff8a 1760 count_scale = (REG_BR_PROB_BASE * count
1b369fae 1761 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
e21aff8a
SB
1762 else
1763 count_scale = 1;
1764
1b369fae 1765 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
e21aff8a
SB
1766 frequency_scale = (REG_BR_PROB_BASE * frequency
1767 /
1b369fae 1768 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
e21aff8a
SB
1769 else
1770 frequency_scale = count_scale;
1771
1772 /* Register specific tree functions. */
726a989a 1773 gimple_register_cfg_hooks ();
e21aff8a
SB
1774
1775 /* We must have a CFG at this point. */
1776 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1777 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1778
110cfe1c
JH
1779 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1780
e21aff8a
SB
1781 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1782 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
110cfe1c
JH
1783 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1784 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
e21aff8a 1785
e21aff8a
SB
1786 /* Duplicate any exception-handling regions. */
1787 if (cfun->eh)
1788 {
1b369fae 1789 id->eh_region_offset
fad41cd7
RH
1790 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1791 0, id->eh_region);
e21aff8a 1792 }
726a989a 1793
e21aff8a
SB
1794 /* Use the aux pointers to map the original blocks to their copies. */
1795 FOR_EACH_BB_FN (bb, cfun_to_copy)
110cfe1c 1796 {
82d6e6fc
KG
1797 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
1798 bb->aux = new_bb;
1799 new_bb->aux = bb;
110cfe1c
JH
1800 }
1801
7c57be85 1802 last = last_basic_block;
726a989a 1803
e21aff8a
SB
1804 /* Now that we've duplicated the blocks, duplicate their edges. */
1805 FOR_ALL_BB_FN (bb, cfun_to_copy)
e9705dc5 1806 copy_edges_for_bb (bb, count_scale, exit_block_map);
726a989a 1807
110cfe1c
JH
1808 if (gimple_in_ssa_p (cfun))
1809 FOR_ALL_BB_FN (bb, cfun_to_copy)
1810 copy_phis_for_bb (bb, id);
726a989a 1811
e21aff8a 1812 FOR_ALL_BB_FN (bb, cfun_to_copy)
110cfe1c
JH
1813 {
1814 ((basic_block)bb->aux)->aux = NULL;
1815 bb->aux = NULL;
1816 }
726a989a 1817
110cfe1c
JH
1818 /* Zero out the AUX fields of blocks newly created during EH edge
1819 insertion. */
7c57be85 1820 for (; last < last_basic_block; last++)
110cfe1c
JH
1821 BASIC_BLOCK (last)->aux = NULL;
1822 entry_block_map->aux = NULL;
1823 exit_block_map->aux = NULL;
e21aff8a
SB
1824
1825 return new_fndecl;
1826}
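
/* The aux-pointer protocol used above, in short: while copying, each
 original block's aux points to its copy and the copy's aux points back,
 so copy_edges_for_bb and copy_phis_for_bb can translate old_edge->dest
 into the corresponding new block with a single pointer chase; both
 directions are cleared again before returning so that later passes see
 clean aux fields. */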
1827
e21aff8a 1828static tree
1b369fae 1829copy_body (copy_body_data *id, gcov_type count, int frequency,
e21aff8a
SB
1830 basic_block entry_block_map, basic_block exit_block_map)
1831{
1b369fae 1832 tree fndecl = id->src_fn;
e21aff8a
SB
1833 tree body;
1834
1835 /* If this body has a CFG, walk CFG and copy. */
1836 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1837 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1838
1839 return body;
1840}
1841
04482133
AO
1842/* Return true if VALUE is an ADDR_EXPR of an automatic variable
1843 defined in function FN, or of a data member thereof. */
1844
1845static bool
1846self_inlining_addr_expr (tree value, tree fn)
1847{
1848 tree var;
1849
1850 if (TREE_CODE (value) != ADDR_EXPR)
1851 return false;
1852
1853 var = get_base_address (TREE_OPERAND (value, 0));
e21aff8a 1854
50886bf1 1855 return var && auto_var_in_fn_p (var, fn);
04482133
AO
1856}
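
/* For instance (a hypothetical sketch):

     int f (int *p)
     {
       int x = 0;
       return p ? *p : f (&x);
     }

 Here the recursive call passes &x, an ADDR_EXPR of f's own local.
 When that call is inlined into f, substituting &x directly for the
 parameter would alias the outer x rather than the copied variable;
 self_inlining_addr_expr detects exactly this situation. */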
1857
6de9cd9a 1858static void
1b369fae 1859setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
e21aff8a 1860 basic_block bb, tree *vars)
6de9cd9a 1861{
726a989a 1862 gimple init_stmt;
6de9cd9a 1863 tree var;
f4088621 1864 tree rhs = value;
110cfe1c
JH
1865 tree def = (gimple_in_ssa_p (cfun)
1866 ? gimple_default_def (id->src_cfun, p) : NULL);
6de9cd9a 1867
f4088621
RG
1868 if (value
1869 && value != error_mark_node
1870 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
c54e3854
RG
1871 {
1872 if (fold_convertible_p (TREE_TYPE (p), value))
1873 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
1874 else
1875 /* ??? For valid (GIMPLE) programs we should not end up here.
1876 Still if something has gone wrong and we end up with truly
1877 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
1878 to not leak invalid GIMPLE to the following passes. */
1879 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
1880 }
f4088621 1881
110cfe1c
JH
1882 /* If the parameter is never assigned to and has no SSA_NAMEs created,
1883 we may not need to create a new variable here at all. Instead, we may
1884 be able to just use the argument value. */
6de9cd9a
DN
1885 if (TREE_READONLY (p)
1886 && !TREE_ADDRESSABLE (p)
110cfe1c
JH
1887 && value && !TREE_SIDE_EFFECTS (value)
1888 && !def)
6de9cd9a 1889 {
84936f6f
RH
1890 /* We may produce non-gimple trees by adding NOPs or introduce
1891 invalid sharing when the operand is not really constant.
1892 It is no big deal to prohibit constant propagation here, as
1893 we will constant-propagate in the DOM1 pass anyway. */
1894 if (is_gimple_min_invariant (value)
f4088621
RG
1895 && useless_type_conversion_p (TREE_TYPE (p),
1896 TREE_TYPE (value))
04482133
AO
1897 /* We have to be very careful about ADDR_EXPR. Make sure
1898 the base variable isn't a local variable of the inlined
1899 function, e.g., when doing recursive inlining, direct or
1900 mutually-recursive or whatever, which is why we don't
1901 just test whether fn == current_function_decl. */
1902 && ! self_inlining_addr_expr (value, fn))
6de9cd9a 1903 {
6de9cd9a
DN
1904 insert_decl_map (id, p, value);
1905 return;
1906 }
1907 }
1908
5377d5ba
RK
1909 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1910 here since the type of this decl must be visible to the calling
8c27b7d4 1911 function. */
1b369fae 1912 var = copy_decl_to_var (p, id);
110cfe1c
JH
1913 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
1914 {
1915 get_var_ann (var);
1916 add_referenced_var (var);
1917 }
e21aff8a 1918
6de9cd9a
DN
1919 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1920 that way, when the PARM_DECL is encountered, it will be
1921 automatically replaced by the VAR_DECL. */
7c7d3047 1922 insert_decl_map (id, p, var);
6de9cd9a
DN
1923
1924 /* Declare this new variable. */
1925 TREE_CHAIN (var) = *vars;
1926 *vars = var;
1927
1928 /* Make gimplifier happy about this variable. */
84936f6f 1929 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
6de9cd9a
DN
1930
1931 /* Even if P was TREE_READONLY, the new VAR should not be.
1932 In the original code, we would have constructed a
1933 temporary, and then the function body would have never
1934 changed the value of P. However, now, we will be
1935 constructing VAR directly. The constructor body may
1936 change its value multiple times as it is being
1937 constructed. Therefore, it must not be TREE_READONLY;
1938 the back-end assumes that TREE_READONLY variable is
1939 assigned to only once. */
1940 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1941 TREE_READONLY (var) = 0;
1942
110cfe1c
JH
1943 /* If there is no setup required and we are in SSA, take the easy route
1944 replacing all SSA names representing the function parameter by the
1945 SSA name passed to the function.
1946
1947 We need to construct the map for the variable anyway, as it might be
1948 used in different SSA names when the parameter is set in the function.
1949
1950 FIXME: This usually kills the last connection between the inlined
1951 function parameter and the actual value in the debug info. Can we do
1952 better here? If we just inserted the statement, copy propagation
1953 would kill it anyway, as it always did in older versions of GCC.
1954
1955 We might want to introduce the notion that a single SSA_NAME can
1956 represent multiple variables for purposes of debugging. */
1957 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
1958 && (TREE_CODE (rhs) == SSA_NAME
9b718f81
JH
1959 || is_gimple_min_invariant (rhs))
1960 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
110cfe1c
JH
1961 {
1962 insert_decl_map (id, def, rhs);
1963 return;
1964 }
1965
f6f2da7d
JH
1966 /* If the value of the argument is never used, there is no need to
1967 initialize it. */
1968 if (gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
1969 {
1970 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
1971 return;
1972 }
1973
6de9cd9a
DN
1974 /* Initialize this VAR_DECL from the equivalent argument. Convert
1975 the argument to the proper type in case it was promoted. */
1976 if (value)
1977 {
726a989a 1978 gimple_stmt_iterator si = gsi_last_bb (bb);
6de9cd9a
DN
1979
1980 if (rhs == error_mark_node)
110cfe1c 1981 {
7c7d3047 1982 insert_decl_map (id, p, var);
110cfe1c
JH
1983 return;
1984 }
afe08db5 1985
73dab33b 1986 STRIP_USELESS_TYPE_CONVERSION (rhs);
6de9cd9a 1987
726a989a 1988 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
6de9cd9a 1989 keep our trees in gimple form. */
110cfe1c
JH
1990 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
1991 {
1992 def = remap_ssa_name (def, id);
726a989a 1993 init_stmt = gimple_build_assign (def, rhs);
110cfe1c
JH
1994 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
1995 set_default_def (var, NULL);
1996 }
1997 else
726a989a 1998 init_stmt = gimple_build_assign (var, rhs);
6de9cd9a
DN
1999
2000 /* If we did not create a gimple value and we did not create a gimple
726a989a 2001 cast of a gimple value, then we will need to gimplify INIT_STMT
6de9cd9a 2002 at the end. Note that is_gimple_cast only checks the outer
128a79fb 2003 tree code, not its operand. Thus the explicit check that its
6de9cd9a 2004 operand is a gimple value. */
b779a874 2005 if ((!is_gimple_val (rhs)
6de9cd9a
DN
2006 && (!is_gimple_cast (rhs)
2007 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
b779a874 2008 || !is_gimple_reg (var))
110cfe1c 2009 {
726a989a
RB
2010 gimple_stmt_iterator i;
2011 gimple_seq seq = gimple_seq_alloc ();
d406b663 2012 struct gimplify_ctx gctx;
110cfe1c 2013
d406b663 2014 push_gimplify_context (&gctx);
726a989a
RB
2015
2016 i = gsi_start (seq);
2017 gimple_regimplify_operands (init_stmt, &i);
2018
110cfe1c 2019 if (gimple_in_ssa_p (cfun)
726a989a
RB
2020 && init_stmt
2021 && !gimple_seq_empty_p (seq))
110cfe1c
JH
2022 {
2023 /* The replacement can expose previously unreferenced
2024 variables. */
726a989a
RB
2025 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2026 find_new_referenced_vars (gsi_stmt (i));
2027
2028 /* Insert the gimplified sequence needed for INIT_STMT
2029 after SI. INIT_STMT will be inserted after SEQ. */
2030 gsi_insert_seq_after (&si, seq, GSI_NEW_STMT);
110cfe1c 2031 }
726a989a 2032
110cfe1c
JH
2033 pop_gimplify_context (NULL);
2034 }
52f66176
RK
2035
2036 /* If VAR represents a zero-sized variable, the assignment
fa10beec 2037 statement may result in no gimple statements. */
047f4b2c 2038 if (init_stmt)
726a989a
RB
2039 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2040
110cfe1c 2041 if (gimple_in_ssa_p (cfun))
726a989a
RB
2042 for (;!gsi_end_p (si); gsi_next (&si))
2043 mark_symbols_for_renaming (gsi_stmt (si));
6de9cd9a
DN
2044 }
2045}
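
/* A hypothetical example of what setup_one_parameter arranges: when

     static int add (int a, int b) { return a + b; }

 is inlined at the call add (x_3, 7), the default-definition SSA names
 of a and b can simply be mapped to x_3 and 7 (the easy SSA route
 above), so no copy statements are emitted; failing that, assignments
 such as a_5 = x_3 and b_6 = 7 are inserted at the end of BB. */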
2046
d4e4baa9 2047/* Generate code to initialize the parameters of the function at the
726a989a 2048 top of the stack in ID from the GIMPLE_CALL STMT. */
d4e4baa9 2049
e21aff8a 2050static void
726a989a 2051initialize_inlined_parameters (copy_body_data *id, gimple stmt,
e21aff8a 2052 tree fn, basic_block bb)
d4e4baa9 2053{
d4e4baa9 2054 tree parms;
726a989a 2055 size_t i;
d4e4baa9 2056 tree p;
d436bff8 2057 tree vars = NULL_TREE;
726a989a 2058 tree static_chain = gimple_call_chain (stmt);
d4e4baa9
AO
2059
2060 /* Figure out what the parameters are. */
18c6ada9 2061 parms = DECL_ARGUMENTS (fn);
d4e4baa9 2062
d4e4baa9
AO
2063 /* Loop through the parameter declarations, replacing each with an
2064 equivalent VAR_DECL, appropriately initialized. */
726a989a
RB
2065 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2066 {
2067 tree val;
2068 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2069 setup_one_parameter (id, p, val, fn, bb, &vars);
2070 }
4838c5ee 2071
6de9cd9a
DN
2072 /* Initialize the static chain. */
2073 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
ea99e0be 2074 gcc_assert (fn != current_function_decl);
6de9cd9a
DN
2075 if (p)
2076 {
2077 /* No static chain? Seems like a bug in tree-nested.c. */
1e128c5f 2078 gcc_assert (static_chain);
4838c5ee 2079
e21aff8a 2080 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
4838c5ee
AO
2081 }
2082
e21aff8a 2083 declare_inline_vars (id->block, vars);
d4e4baa9
AO
2084}
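
/* An illustration of the static chain case, using hypothetical GNU C:

     int outer (int n)
     {
       int inner (void) { return n + 1; }
       return inner ();
     }

 The nested function inner reaches outer's frame through the static
 chain, so when inner is inlined the chain value of the call is bound
 by setup_one_parameter just like an ordinary argument. */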
2085
726a989a 2086
e21aff8a
SB
2087/* Declare a return variable to replace the RESULT_DECL for the
2088 function we are calling. An appropriate DECL_STMT is returned.
2089 The USE_STMT is filled to contain a use of the declaration to
2090 indicate the return value of the function.
2091
110cfe1c
JH
2092 RETURN_SLOT, if non-null, is the place where the result is stored. It
2093 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
726a989a 2094 was the LHS of the MODIFY_EXPR to which this call is the RHS.
7740f00d
RH
2095
2096 The return value is a (possibly null) value that is the result of the
2097 function as seen by the callee. *USE_P is a (possibly null) value that
2098 holds the result as seen by the caller. */
d4e4baa9 2099
d436bff8 2100static tree
110cfe1c
JH
2101declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2102 tree *use_p)
d4e4baa9 2103{
1b369fae
RH
2104 tree callee = id->src_fn;
2105 tree caller = id->dst_fn;
7740f00d
RH
2106 tree result = DECL_RESULT (callee);
2107 tree callee_type = TREE_TYPE (result);
2108 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2109 tree var, use;
d4e4baa9
AO
2110
2111 /* We don't need to do anything for functions that don't return
2112 anything. */
7740f00d 2113 if (!result || VOID_TYPE_P (callee_type))
d4e4baa9 2114 {
6de9cd9a 2115 *use_p = NULL_TREE;
d4e4baa9
AO
2116 return NULL_TREE;
2117 }
2118
cc77ae10 2119 /* If there was a return slot, then the return value is the
7740f00d 2120 dereferenced address of that object. */
110cfe1c 2121 if (return_slot)
7740f00d 2122 {
110cfe1c 2123 /* The front end shouldn't have used both return_slot and
7740f00d 2124 a modify expression. */
1e128c5f 2125 gcc_assert (!modify_dest);
cc77ae10 2126 if (DECL_BY_REFERENCE (result))
110cfe1c
JH
2127 {
2128 tree return_slot_addr = build_fold_addr_expr (return_slot);
2129 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2130
2131 /* We are going to construct *&return_slot and we can't do that
2132 for variables believed not to be addressable.
2133
2134 FIXME: This check can possibly trigger, because values returned
2135 via the return slot optimization are not believed by alias
2136 analysis to have their address taken. */
2137 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2138 if (gimple_in_ssa_p (cfun))
2139 {
2140 HOST_WIDE_INT bitsize;
2141 HOST_WIDE_INT bitpos;
2142 tree offset;
2143 enum machine_mode mode;
2144 int unsignedp;
2145 int volatilep;
2146 tree base;
2147 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2148 &offset,
2149 &mode, &unsignedp, &volatilep,
2150 false);
2151 if (TREE_CODE (base) == INDIRECT_REF)
2152 base = TREE_OPERAND (base, 0);
2153 if (TREE_CODE (base) == SSA_NAME)
2154 base = SSA_NAME_VAR (base);
2155 mark_sym_for_renaming (base);
2156 }
2157 var = return_slot_addr;
2158 }
cc77ae10 2159 else
110cfe1c
JH
2160 {
2161 var = return_slot;
2162 gcc_assert (TREE_CODE (var) != SSA_NAME);
b5ca517c 2163 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
110cfe1c 2164 }
0890b981
AP
2165 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2166 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2167 && !DECL_GIMPLE_REG_P (result)
22918034 2168 && DECL_P (var))
0890b981 2169 DECL_GIMPLE_REG_P (var) = 0;
7740f00d
RH
2170 use = NULL;
2171 goto done;
2172 }
2173
2174 /* All types requiring non-trivial constructors should have been handled. */
1e128c5f 2175 gcc_assert (!TREE_ADDRESSABLE (callee_type));
7740f00d
RH
2176
2177 /* Attempt to avoid creating a new temporary variable. */
110cfe1c
JH
2178 if (modify_dest
2179 && TREE_CODE (modify_dest) != SSA_NAME)
7740f00d
RH
2180 {
2181 bool use_it = false;
2182
2183 /* We can't use MODIFY_DEST if there's type promotion involved. */
f4088621 2184 if (!useless_type_conversion_p (callee_type, caller_type))
7740f00d
RH
2185 use_it = false;
2186
2187 /* ??? If we're assigning to a variable sized type, then we must
2188 reuse the destination variable, because we've no good way to
2189 create variable sized temporaries at this point. */
2190 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2191 use_it = true;
2192
2193 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2194 reuse it as the result of the call directly. Don't do this if
2195 it would promote MODIFY_DEST to addressable. */
e2f9fe42
RH
2196 else if (TREE_ADDRESSABLE (result))
2197 use_it = false;
2198 else
2199 {
2200 tree base_m = get_base_address (modify_dest);
2201
2202 /* If the base isn't a decl, then it's a pointer, and we don't
2203 know where that's going to go. */
2204 if (!DECL_P (base_m))
2205 use_it = false;
2206 else if (is_global_var (base_m))
2207 use_it = false;
0890b981
AP
2208 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2209 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2210 && !DECL_GIMPLE_REG_P (result)
2211 && DECL_GIMPLE_REG_P (base_m))
1d327c16 2212 use_it = false;
e2f9fe42
RH
2213 else if (!TREE_ADDRESSABLE (base_m))
2214 use_it = true;
2215 }
7740f00d
RH
2216
2217 if (use_it)
2218 {
2219 var = modify_dest;
2220 use = NULL;
2221 goto done;
2222 }
2223 }
2224
1e128c5f 2225 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
7740f00d 2226
c08cd4c1 2227 var = copy_result_decl_to_var (result, id);
110cfe1c
JH
2228 if (gimple_in_ssa_p (cfun))
2229 {
2230 get_var_ann (var);
2231 add_referenced_var (var);
2232 }
e21aff8a 2233
7740f00d 2234 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
cb91fab0 2235 DECL_STRUCT_FUNCTION (caller)->local_decls
7740f00d 2236 = tree_cons (NULL_TREE, var,
cb91fab0 2237 DECL_STRUCT_FUNCTION (caller)->local_decls);
7740f00d 2238
6de9cd9a 2239 /* Do not have the rest of GCC warn about this variable as it should
471854f8 2240 not be visible to the user. */
6de9cd9a 2241 TREE_NO_WARNING (var) = 1;
d4e4baa9 2242
c08cd4c1
JM
2243 declare_inline_vars (id->block, var);
2244
7740f00d
RH
2245 /* Build the use expr. If the return type of the function was
2246 promoted, convert it back to the expected type. */
2247 use = var;
f4088621 2248 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
7740f00d 2249 use = fold_convert (caller_type, var);
73dab33b
AP
2250
2251 STRIP_USELESS_TYPE_CONVERSION (use);
7740f00d 2252
c08cd4c1
JM
2253 if (DECL_BY_REFERENCE (result))
2254 var = build_fold_addr_expr (var);
2255
7740f00d 2256 done:
d4e4baa9
AO
2257 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2258 way, when the RESULT_DECL is encountered, it will be
2259 automatically replaced by the VAR_DECL. */
5e20bdd7 2260 insert_decl_map (id, result, var);
d4e4baa9 2261
6de9cd9a
DN
2262 /* Remember this so we can ignore it in remap_decls. */
2263 id->retvar = var;
2264
7740f00d
RH
2265 *use_p = use;
2266 return var;
d4e4baa9
AO
2267}
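
/* A hypothetical example of the reuse logic above:

     struct big { int a[64]; } s;
     s = produce ();

 When produce is inlined, MODIFY_DEST is s; since s is a local,
 non-addressable decl of fixed size, the checks above allow the inlined
 body to store into s directly instead of constructing a temporary and
 copying it over. */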
2268
0e9e1e0a 2269/* Returns nonzero if a function can be inlined as a tree. */
4838c5ee 2270
b3c3af2f
SB
2271bool
2272tree_inlinable_function_p (tree fn)
4838c5ee 2273{
726a989a
RB
2274 bool ret = inlinable_function_p (fn);
2275
2276 if (getenv ("TUPLES_INLINE"))
2277 fprintf (stderr, "Function %s is %sinlinable\n", get_name (fn),
2278 ret ? "" : "not ");
2279
2280 return ret;
2281}
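
/* Note: setting the environment variable TUPLES_INLINE (to any value)
 makes the function above report each decision on stderr, as
 "Function foo is inlinable" or "Function foo is not inlinable". */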
2282
2283static const char *inline_forbidden_reason;
2284
2285/* A callback for walk_gimple_seq to handle tree operands. Returns
2286 NULL_TREE if a function can be inlined, otherwise sets the reason
2287 why not and returns a tree representing the offending operand. */
2288
2289static tree
2290inline_forbidden_p_op (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2291 void *fnp ATTRIBUTE_UNUSED)
2292{
2293 tree node = *nodep;
2294 tree t;
2295
2296 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2297 {
2298 /* We cannot inline a function of the form
2299
2300 void F (int i) { struct S { int ar[i]; } s; }
2301
2302 Attempting to do so produces a catch-22.
2303 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2304 UNION_TYPE nodes, then it goes into infinite recursion on a
2305 structure containing a pointer to its own type. If it doesn't,
2306 then the type node for S doesn't get adjusted properly when
2307 F is inlined.
2308
2309 ??? This is likely no longer true, but it's too late in the 4.0
2310 cycle to try to find out. This should be checked for 4.1. */
2311 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2312 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2313 {
2314 inline_forbidden_reason
2315 = G_("function %q+F can never be inlined "
2316 "because it uses variable sized variables");
2317 return node;
2318 }
2319 }
2320
2321 return NULL_TREE;
4838c5ee
AO
2322}
2323
726a989a
RB
2324
2325/* A callback for walk_gimple_seq to handle statements. Returns
2326 non-NULL iff a function cannot be inlined. Also sets the reason
2327 why not. */
c986baf6 2328
c986baf6 2329static tree
726a989a
RB
2330inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2331 struct walk_stmt_info *wip)
c986baf6 2332{
726a989a 2333 tree fn = (tree) wip->info;
f08545a8 2334 tree t;
726a989a 2335 gimple stmt = gsi_stmt (*gsi);
c986baf6 2336
726a989a 2337 switch (gimple_code (stmt))
f08545a8 2338 {
726a989a 2339 case GIMPLE_CALL:
3197c4fd
AS
2340 /* Refuse to inline an alloca call unless the user explicitly forced
2341 it, since this may change the program's memory overhead drastically
2342 when the function using alloca is called in a loop. In the GCC
2343 sources included in SPEC2000, inlining into schedule_block caused it
2344 to require 2GB of RAM instead of 256MB. */
726a989a 2345 if (gimple_alloca_call_p (stmt)
f08545a8
JH
2346 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2347 {
ddd2d57e 2348 inline_forbidden_reason
dee15844 2349 = G_("function %q+F can never be inlined because it uses "
ddd2d57e 2350 "alloca (override using the always_inline attribute)");
726a989a
RB
2351 *handled_ops_p = true;
2352 return fn;
f08545a8 2353 }
726a989a
RB
2354
2355 t = gimple_call_fndecl (stmt);
2356 if (t == NULL_TREE)
f08545a8 2357 break;
84f5e1b1 2358
f08545a8
JH
2359 /* We cannot inline functions that call setjmp. */
2360 if (setjmp_call_p (t))
2361 {
ddd2d57e 2362 inline_forbidden_reason
dee15844 2363 = G_("function %q+F can never be inlined because it uses setjmp");
726a989a
RB
2364 *handled_ops_p = true;
2365 return t;
f08545a8
JH
2366 }
2367
6de9cd9a 2368 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3197c4fd 2369 switch (DECL_FUNCTION_CODE (t))
f08545a8 2370 {
3197c4fd
AS
2371 /* We cannot inline functions that take a variable number of
2372 arguments. */
2373 case BUILT_IN_VA_START:
3197c4fd
AS
2374 case BUILT_IN_NEXT_ARG:
2375 case BUILT_IN_VA_END:
6de9cd9a 2376 inline_forbidden_reason
dee15844 2377 = G_("function %q+F can never be inlined because it "
6de9cd9a 2378 "uses variable argument lists");
726a989a
RB
2379 *handled_ops_p = true;
2380 return t;
6de9cd9a 2381
3197c4fd 2382 case BUILT_IN_LONGJMP:
6de9cd9a
DN
2383 /* We can't inline functions that call __builtin_longjmp at
2384 all. The non-local goto machinery really requires the
2385 destination be in a different function. If we allow the
2386 function calling __builtin_longjmp to be inlined into the
2387 function calling __builtin_setjmp, Things will Go Awry. */
2388 inline_forbidden_reason
dee15844 2389 = G_("function %q+F can never be inlined because "
6de9cd9a 2390 "it uses setjmp-longjmp exception handling");
726a989a
RB
2391 *handled_ops_p = true;
2392 return t;
6de9cd9a
DN
2393
2394 case BUILT_IN_NONLOCAL_GOTO:
2395 /* Similarly. */
2396 inline_forbidden_reason
dee15844 2397 = G_("function %q+F can never be inlined because "
6de9cd9a 2398 "it uses non-local goto");
726a989a
RB
2399 *handled_ops_p = true;
2400 return t;
f08545a8 2401
4b284111
JJ
2402 case BUILT_IN_RETURN:
2403 case BUILT_IN_APPLY_ARGS:
2404 /* If a __builtin_apply_args caller would be inlined,
2405 it would be saving arguments of the function it has
2406 been inlined into. Similarly __builtin_return would
2407 return from the function the inline has been inlined into. */
2408 inline_forbidden_reason
dee15844 2409 = G_("function %q+F can never be inlined because "
4b284111 2410 "it uses __builtin_return or __builtin_apply_args");
726a989a
RB
2411 *handled_ops_p = true;
2412 return t;
4b284111 2413
3197c4fd
AS
2414 default:
2415 break;
2416 }
f08545a8
JH
2417 break;
2418
726a989a
RB
2419 case GIMPLE_GOTO:
2420 t = gimple_goto_dest (stmt);
f08545a8
JH
2421
2422 /* We will not inline a function which uses computed goto. The
2423 addresses of its local labels, which may be tucked into
2424 global storage, are of course not constant across
2425 instantiations, which causes unexpected behavior. */
2426 if (TREE_CODE (t) != LABEL_DECL)
2427 {
ddd2d57e 2428 inline_forbidden_reason
dee15844 2429 = G_("function %q+F can never be inlined "
ddd2d57e 2430 "because it contains a computed goto");
726a989a
RB
2431 *handled_ops_p = true;
2432 return t;
f08545a8 2433 }
6de9cd9a 2434 break;
f08545a8 2435
726a989a
RB
2436 case GIMPLE_LABEL:
2437 t = gimple_label_label (stmt);
6de9cd9a 2438 if (DECL_NONLOCAL (t))
f08545a8 2439 {
6de9cd9a
DN
2440 /* We cannot inline a function that receives a non-local goto
2441 because we cannot remap the destination label used in the
2442 function that is performing the non-local goto. */
ddd2d57e 2443 inline_forbidden_reason
dee15844 2444 = G_("function %q+F can never be inlined "
6de9cd9a 2445 "because it receives a non-local goto");
726a989a
RB
2446 *handled_ops_p = true;
2447 return t;
f08545a8 2448 }
f08545a8
JH
2449 break;
2450
f08545a8
JH
2451 default:
2452 break;
2453 }
2454
726a989a 2455 *handled_ops_p = false;
f08545a8 2456 return NULL_TREE;
84f5e1b1
RH
2457}
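
/* Hypothetical examples of functions the walker above rejects, assuming
 the usual <alloca.h> and <setjmp.h> declarations:

     void *f (unsigned n) { return alloca (n); }      (uses alloca)
     int g (void) { jmp_buf b; return setjmp (b); }   (calls setjmp)
     void h (void *x) { goto *x; }                    (computed goto)

 Each one trips the corresponding check and records the explanation in
 inline_forbidden_reason. */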
2458
726a989a 2459
2092ee7d
JJ
2460static tree
2461inline_forbidden_p_2 (tree *nodep, int *walk_subtrees,
2462 void *fnp)
2463{
2464 tree node = *nodep;
2465 tree fn = (tree) fnp;
2466
2467 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2468 {
2469 inline_forbidden_reason
2470 = G_("function %q+F can never be inlined "
2471 "because it saves address of local label in a static variable");
2472 return node;
2473 }
2474
2475 if (TYPE_P (node))
2476 *walk_subtrees = 0;
2477
2478 return NULL_TREE;
2479}
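
/* For instance (hypothetical GNU C):

     void f (void)
     {
      lab:
       {
         static void *p = &&lab;
         goto *p;
       }
     }

 stores the address of the local label lab in a static variable, which
 the callback above rejects when it scans static initializers. */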
2480
726a989a
RB
2481/* Return true if FNDECL is a function that cannot be inlined into
2482 another one. */
2483
2484static bool
f08545a8 2485inline_forbidden_p (tree fndecl)
84f5e1b1 2486{
070588f0 2487 location_t saved_loc = input_location;
2092ee7d
JJ
2488 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2489 tree step;
726a989a
RB
2490 struct walk_stmt_info wi;
2491 struct pointer_set_t *visited_nodes;
2492 basic_block bb;
2493 bool forbidden_p = false;
2494
2495 visited_nodes = pointer_set_create ();
2496 memset (&wi, 0, sizeof (wi));
2497 wi.info = (void *) fndecl;
2498 wi.pset = visited_nodes;
e21aff8a 2499
2092ee7d 2500 FOR_EACH_BB_FN (bb, fun)
726a989a
RB
2501 {
2502 gimple ret;
2503 gimple_seq seq = bb_seq (bb);
2504 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt,
2505 inline_forbidden_p_op, &wi);
2506 forbidden_p = (ret != NULL);
2507 if (forbidden_p)
2508 goto egress;
2509 }
ed397c43 2510
cb91fab0 2511 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2092ee7d
JJ
2512 {
2513 tree decl = TREE_VALUE (step);
2514 if (TREE_CODE (decl) == VAR_DECL
2515 && TREE_STATIC (decl)
2516 && !DECL_EXTERNAL (decl)
2517 && DECL_INITIAL (decl))
726a989a
RB
2518 {
2519 tree ret;
2520 ret = walk_tree_without_duplicates (&DECL_INITIAL (decl),
2521 inline_forbidden_p_2, fndecl);
2522 forbidden_p = (ret != NULL);
2523 if (forbidden_p)
2524 goto egress;
2525 }
2092ee7d
JJ
2526 }
2527
e21aff8a 2528egress:
726a989a 2529 pointer_set_destroy (visited_nodes);
070588f0 2530 input_location = saved_loc;
726a989a 2531 return forbidden_p;
84f5e1b1
RH
2532}
2533
b3c3af2f
SB
2534/* Returns nonzero if FN is a function that does not have any
2535 fundamental inline blocking properties. */
d4e4baa9 2536
b3c3af2f
SB
2537static bool
2538inlinable_function_p (tree fn)
d4e4baa9 2539{
b3c3af2f 2540 bool inlinable = true;
18177c7e
RG
2541 bool do_warning;
2542 tree always_inline;
d4e4baa9
AO
2543
2544 /* If we've already decided this function shouldn't be inlined,
2545 there's no need to check again. */
2546 if (DECL_UNINLINABLE (fn))
b3c3af2f 2547 return false;
d4e4baa9 2548
18177c7e
RG
2549 /* We only warn for functions declared `inline' by the user. */
2550 do_warning = (warn_inline
18177c7e
RG
2551 && DECL_DECLARED_INLINE_P (fn)
2552 && !DECL_IN_SYSTEM_HEADER (fn));
2553
2554 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2555
e90acd93 2556 if (flag_no_inline
18177c7e
RG
2557 && always_inline == NULL)
2558 {
2559 if (do_warning)
2560 warning (OPT_Winline, "function %q+F can never be inlined because it "
2561 "is suppressed using -fno-inline", fn);
2562 inlinable = false;
2563 }
2564
2565 /* Don't auto-inline anything that might not be bound within
2566 this unit of translation. */
2567 else if (!DECL_DECLARED_INLINE_P (fn)
2568 && DECL_REPLACEABLE_P (fn))
2569 inlinable = false;
2570
2571 else if (!function_attribute_inlinable_p (fn))
2572 {
2573 if (do_warning)
2574 warning (OPT_Winline, "function %q+F can never be inlined because it "
2575 "uses attributes conflicting with inlining", fn);
2576 inlinable = false;
2577 }
46c5ad27 2578
b3c3af2f
SB
2579 /* If we don't have the function body available, we can't inline it.
2580 However, this should not be recorded since we also get here for
2581 forward declared inline functions. Therefore, return at once. */
726a989a 2582 if (!gimple_body (fn))
b3c3af2f
SB
2583 return false;
2584
f08545a8 2585 else if (inline_forbidden_p (fn))
b3c3af2f
SB
2586 {
2587 /* See if we should warn about uninlinable functions. Previously,
2588 some of these warnings would be issued while trying to expand
2589 the function inline, but that would cause multiple warnings
2590 about functions that would for example call alloca. But since
2591 this a property of the function, just one warning is enough.
2592 As a bonus we can now give more details about the reason why a
18177c7e
RG
2593 function is not inlinable. */
2594 if (always_inline)
dee15844 2595 sorry (inline_forbidden_reason, fn);
2d327012 2596 else if (do_warning)
d2fcbf6f 2597 warning (OPT_Winline, inline_forbidden_reason, fn);
b3c3af2f
SB
2598
2599 inlinable = false;
2600 }
d4e4baa9
AO
2601
2602 /* Squirrel away the result so that we don't have to check again. */
b3c3af2f 2603 DECL_UNINLINABLE (fn) = !inlinable;
d4e4baa9 2604
b3c3af2f
SB
2605 return inlinable;
2606}
2607
e5c4f28a
RG
2608/* Estimate the cost of a memory move. Use the machine-dependent
2609 word size and take a possible memcpy call into account. */
2610
2611int
2612estimate_move_cost (tree type)
2613{
2614 HOST_WIDE_INT size;
2615
2616 size = int_size_in_bytes (type);
2617
2618 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
2619 /* Cost of a memcpy call, 3 arguments and the call. */
2620 return 4;
2621 else
2622 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
2623}
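
/* Worked numbers for the above, assuming MOVE_MAX_PIECES == 8 and
 MOVE_RATIO == 4 (both are target-dependent, so purely illustrative):

     size ==   4  ->  (4 + 7) / 8  == 1
     size ==  24  ->  (24 + 7) / 8 == 3
     size == 100  ->  100 > 8 * 4, so 4 (the cost of a memcpy call) */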
2624
726a989a 2625/* Returns the cost of operation CODE, according to WEIGHTS. */
7f9bc51b 2626
726a989a
RB
2627static int
2628estimate_operator_cost (enum tree_code code, eni_weights *weights)
6de9cd9a 2629{
726a989a 2630 switch (code)
6de9cd9a 2631 {
726a989a
RB
2632 /* These are "free" conversions, or their presumed cost
2633 is folded into other operations. */
61fcaeec 2634 case RANGE_EXPR:
1a87cf0c 2635 CASE_CONVERT:
726a989a
RB
2636 case COMPLEX_EXPR:
2637 case PAREN_EXPR:
726a989a 2638 return 0;
6de9cd9a 2639
e5c4f28a
RG
2640 /* Assign a cost of 1 to the usual operations.
2641 ??? We may consider mapping RTL costs to this. */
6de9cd9a 2642 case COND_EXPR:
4151978d 2643 case VEC_COND_EXPR:
6de9cd9a
DN
2644
2645 case PLUS_EXPR:
5be014d5 2646 case POINTER_PLUS_EXPR:
6de9cd9a
DN
2647 case MINUS_EXPR:
2648 case MULT_EXPR:
2649
325217ed 2650 case FIXED_CONVERT_EXPR:
6de9cd9a 2651 case FIX_TRUNC_EXPR:
6de9cd9a
DN
2652
2653 case NEGATE_EXPR:
2654 case FLOAT_EXPR:
2655 case MIN_EXPR:
2656 case MAX_EXPR:
2657 case ABS_EXPR:
2658
2659 case LSHIFT_EXPR:
2660 case RSHIFT_EXPR:
2661 case LROTATE_EXPR:
2662 case RROTATE_EXPR:
a6b46ba2
DN
2663 case VEC_LSHIFT_EXPR:
2664 case VEC_RSHIFT_EXPR:
6de9cd9a
DN
2665
2666 case BIT_IOR_EXPR:
2667 case BIT_XOR_EXPR:
2668 case BIT_AND_EXPR:
2669 case BIT_NOT_EXPR:
2670
2671 case TRUTH_ANDIF_EXPR:
2672 case TRUTH_ORIF_EXPR:
2673 case TRUTH_AND_EXPR:
2674 case TRUTH_OR_EXPR:
2675 case TRUTH_XOR_EXPR:
2676 case TRUTH_NOT_EXPR:
2677
2678 case LT_EXPR:
2679 case LE_EXPR:
2680 case GT_EXPR:
2681 case GE_EXPR:
2682 case EQ_EXPR:
2683 case NE_EXPR:
2684 case ORDERED_EXPR:
2685 case UNORDERED_EXPR:
2686
2687 case UNLT_EXPR:
2688 case UNLE_EXPR:
2689 case UNGT_EXPR:
2690 case UNGE_EXPR:
2691 case UNEQ_EXPR:
d1a7edaf 2692 case LTGT_EXPR:
6de9cd9a 2693
6de9cd9a
DN
2694 case CONJ_EXPR:
2695
2696 case PREDECREMENT_EXPR:
2697 case PREINCREMENT_EXPR:
2698 case POSTDECREMENT_EXPR:
2699 case POSTINCREMENT_EXPR:
2700
16630a2c
DN
2701 case REALIGN_LOAD_EXPR:
2702
61d3cdbb
DN
2703 case REDUC_MAX_EXPR:
2704 case REDUC_MIN_EXPR:
2705 case REDUC_PLUS_EXPR:
20f06221 2706 case WIDEN_SUM_EXPR:
726a989a
RB
2707 case WIDEN_MULT_EXPR:
2708 case DOT_PROD_EXPR:
2709
89d67cca
DN
2710 case VEC_WIDEN_MULT_HI_EXPR:
2711 case VEC_WIDEN_MULT_LO_EXPR:
2712 case VEC_UNPACK_HI_EXPR:
2713 case VEC_UNPACK_LO_EXPR:
d9987fb4
UB
2714 case VEC_UNPACK_FLOAT_HI_EXPR:
2715 case VEC_UNPACK_FLOAT_LO_EXPR:
8115817b 2716 case VEC_PACK_TRUNC_EXPR:
89d67cca 2717 case VEC_PACK_SAT_EXPR:
d9987fb4 2718 case VEC_PACK_FIX_TRUNC_EXPR:
98b44b0e
IR
2719 case VEC_EXTRACT_EVEN_EXPR:
2720 case VEC_EXTRACT_ODD_EXPR:
2721 case VEC_INTERLEAVE_HIGH_EXPR:
2722 case VEC_INTERLEAVE_LOW_EXPR:
2723
726a989a 2724 return 1;
6de9cd9a 2725
1ea7e6ad 2726 /* A few special cases of expensive operations. This is useful
6de9cd9a
DN
 2727 for avoiding the inlining of functions that have too many of these. */
2728 case TRUNC_DIV_EXPR:
2729 case CEIL_DIV_EXPR:
2730 case FLOOR_DIV_EXPR:
2731 case ROUND_DIV_EXPR:
2732 case EXACT_DIV_EXPR:
2733 case TRUNC_MOD_EXPR:
2734 case CEIL_MOD_EXPR:
2735 case FLOOR_MOD_EXPR:
2736 case ROUND_MOD_EXPR:
2737 case RDIV_EXPR:
726a989a
RB
2738 return weights->div_mod_cost;
2739
2740 default:
2741 /* We expect a copy assignment with no operator. */
2742 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
2743 return 0;
2744 }
2745}
2746
2747
2748/* Estimate number of instructions that will be created by expanding
2749 the statements in the statement sequence STMTS.
2750 WEIGHTS contains weights attributed to various constructs. */
2751
2752static
2753int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
2754{
2755 int cost;
2756 gimple_stmt_iterator gsi;
2757
2758 cost = 0;
2759 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
2760 cost += estimate_num_insns (gsi_stmt (gsi), weights);
2761
2762 return cost;
2763}
2764
2765
2766/* Estimate number of instructions that will be created by expanding STMT.
2767 WEIGHTS contains weights attributed to various constructs. */
2768
2769int
2770estimate_num_insns (gimple stmt, eni_weights *weights)
2771{
2772 unsigned cost, i;
2773 enum gimple_code code = gimple_code (stmt);
2774 tree lhs;
2775
2776 switch (code)
2777 {
2778 case GIMPLE_ASSIGN:
2779 /* Try to estimate the cost of assignments. We have two cases to
2780 deal with:
2781 1) Simple assignments to registers;
2782 2) Stores to things that must live in memory. This includes
2783 "normal" stores to scalars, but also assignments of large
2784 structures, or constructors of big arrays;
2785
2786 Let us look at these two cases, assuming we have "a = b + C":
2787 <GIMPLE_ASSIGN <var_decl "a">
2788 <plus_expr <var_decl "b"> <constant C>>
2789 If "a" is a GIMPLE register, the assignment to it is free on almost
2790 any target, because "a" usually ends up in a real register. Hence
2791 the only cost of this expression comes from the PLUS_EXPR, and we
2792 can ignore the GIMPLE_ASSIGN.
2793 If "a" is not a GIMPLE register, the assignment to "a" will most
2794 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
2795 of moving something into "a", which we compute using the function
2796 estimate_move_cost. */
2797 lhs = gimple_assign_lhs (stmt);
2798 if (is_gimple_reg (lhs))
2799 cost = 0;
2800 else
2801 cost = estimate_move_cost (TREE_TYPE (lhs));
2802
2803 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights);
2804 break;
2805
2806 case GIMPLE_COND:
2807 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights);
2808 break;
2809
2810 case GIMPLE_SWITCH:
2811 /* Take into account cost of the switch + guess 2 conditional jumps for
2812 each case label.
2813
2814 TODO: once the switch expansion logic is sufficiently separated, we can
2815 do better job on estimating cost of the switch. */
2816 cost = gimple_switch_num_labels (stmt) * 2;
6de9cd9a 2817 break;
726a989a
RB
2818
2819 case GIMPLE_CALL:
6de9cd9a 2820 {
726a989a
RB
2821 tree decl = gimple_call_fndecl (stmt);
2822 tree addr = gimple_call_fn (stmt);
8723e2fe
JH
2823 tree funtype = TREE_TYPE (addr);
2824
726a989a
RB
2825 if (POINTER_TYPE_P (funtype))
2826 funtype = TREE_TYPE (funtype);
6de9cd9a 2827
625a2efb 2828 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
726a989a 2829 cost = weights->target_builtin_call_cost;
625a2efb 2830 else
726a989a 2831 cost = weights->call_cost;
625a2efb 2832
8c96cd51 2833 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
6de9cd9a
DN
2834 switch (DECL_FUNCTION_CODE (decl))
2835 {
2836 case BUILT_IN_CONSTANT_P:
726a989a 2837 return 0;
6de9cd9a 2838 case BUILT_IN_EXPECT:
726a989a
RB
2839 cost = 0;
2840 break;
2841
7f9bc51b
ZD
2842 /* Prefetch instruction is not expensive. */
2843 case BUILT_IN_PREFETCH:
726a989a 2844 cost = weights->target_builtin_call_cost;
7f9bc51b 2845 break;
726a989a 2846
6de9cd9a
DN
2847 default:
2848 break;
2849 }
e5c4f28a 2850
8723e2fe
JH
2851 if (decl)
2852 funtype = TREE_TYPE (decl);
2853
726a989a
RB
2854 /* Our cost must be kept in sync with
2855 cgraph_estimate_size_after_inlining, which uses the function
2856 declaration to figure out the arguments. */
8723e2fe
JH
2857 if (decl && DECL_ARGUMENTS (decl))
2858 {
2859 tree arg;
2860 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
726a989a 2861 cost += estimate_move_cost (TREE_TYPE (arg));
8723e2fe
JH
2862 }
2863 else if (funtype && prototype_p (funtype))
2864 {
2865 tree t;
2866 for (t = TYPE_ARG_TYPES (funtype); t; t = TREE_CHAIN (t))
726a989a 2867 cost += estimate_move_cost (TREE_VALUE (t));
8723e2fe
JH
2868 }
2869 else
c7f599d0 2870 {
726a989a
RB
2871 for (i = 0; i < gimple_call_num_args (stmt); i++)
2872 {
2873 tree arg = gimple_call_arg (stmt, i);
2874 cost += estimate_move_cost (TREE_TYPE (arg));
2875 }
c7f599d0 2876 }
e5c4f28a 2877
6de9cd9a
DN
2878 break;
2879 }
88f4034b 2880
726a989a
RB
2881 case GIMPLE_GOTO:
2882 case GIMPLE_LABEL:
2883 case GIMPLE_NOP:
2884 case GIMPLE_PHI:
2885 case GIMPLE_RETURN:
2886 case GIMPLE_CHANGE_DYNAMIC_TYPE:
2887 case GIMPLE_PREDICT:
2888 return 0;
2889
2890 case GIMPLE_ASM:
2891 case GIMPLE_RESX:
2892 return 1;
2893
2894 case GIMPLE_BIND:
2895 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
2896
2897 case GIMPLE_EH_FILTER:
2898 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
2899
2900 case GIMPLE_CATCH:
2901 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
2902
2903 case GIMPLE_TRY:
2904 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
2905 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
2906
2907 /* OpenMP directives are generally very expensive. */
2908
2909 case GIMPLE_OMP_RETURN:
2910 case GIMPLE_OMP_SECTIONS_SWITCH:
2911 case GIMPLE_OMP_ATOMIC_STORE:
2912 case GIMPLE_OMP_CONTINUE:
2913 /* ...except these, which are cheap. */
2914 return 0;
2915
2916 case GIMPLE_OMP_ATOMIC_LOAD:
2917 return weights->omp_cost;
2918
2919 case GIMPLE_OMP_FOR:
2920 return (weights->omp_cost
2921 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
2922 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
2923
2924 case GIMPLE_OMP_PARALLEL:
2925 case GIMPLE_OMP_TASK:
2926 case GIMPLE_OMP_CRITICAL:
2927 case GIMPLE_OMP_MASTER:
2928 case GIMPLE_OMP_ORDERED:
2929 case GIMPLE_OMP_SECTION:
2930 case GIMPLE_OMP_SECTIONS:
2931 case GIMPLE_OMP_SINGLE:
2932 return (weights->omp_cost
2933 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
88f4034b 2934
6de9cd9a 2935 default:
1e128c5f 2936 gcc_unreachable ();
6de9cd9a 2937 }
726a989a
RB
2938
2939 return cost;
6de9cd9a
DN
2940}
2941
726a989a
RB
2942/* Estimate number of instructions that will be created by expanding
2943 function FNDECL. WEIGHTS contains weights attributed to various
2944 constructs. */
aa4a53af 2945
6de9cd9a 2946int
726a989a 2947estimate_num_insns_fn (tree fndecl, eni_weights *weights)
6de9cd9a 2948{
726a989a
RB
2949 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
2950 gimple_stmt_iterator bsi;
e21aff8a 2951 basic_block bb;
726a989a 2952 int n = 0;
e21aff8a 2953
726a989a
RB
2954 gcc_assert (my_function && my_function->cfg);
2955 FOR_EACH_BB_FN (bb, my_function)
e21aff8a 2956 {
726a989a
RB
2957 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
2958 n += estimate_num_insns (gsi_stmt (bsi), weights);
e21aff8a 2959 }
e21aff8a 2960
726a989a 2961 return n;
7f9bc51b
ZD
2962}
2963
726a989a 2964
7f9bc51b
ZD
2965/* Initializes weights used by estimate_num_insns. */
2966
2967void
2968init_inline_once (void)
2969{
2970 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
625a2efb 2971 eni_inlining_weights.target_builtin_call_cost = 1;
7f9bc51b 2972 eni_inlining_weights.div_mod_cost = 10;
7f9bc51b
ZD
2973 eni_inlining_weights.omp_cost = 40;
2974
2975 eni_size_weights.call_cost = 1;
625a2efb 2976 eni_size_weights.target_builtin_call_cost = 1;
7f9bc51b 2977 eni_size_weights.div_mod_cost = 1;
7f9bc51b
ZD
2978 eni_size_weights.omp_cost = 40;
2979
2980 /* Estimating the time for a call is difficult, since we have no idea what the
2981 called function does. In the current uses of eni_time_weights,
2982 underestimating the cost does less harm than overestimating it, so
ea2c620c 2983 we choose a rather small value here. */
7f9bc51b 2984 eni_time_weights.call_cost = 10;
625a2efb 2985 eni_time_weights.target_builtin_call_cost = 10;
7f9bc51b 2986 eni_time_weights.div_mod_cost = 10;
7f9bc51b 2987 eni_time_weights.omp_cost = 40;
6de9cd9a
DN
2988}
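
/* A rough worked example under the weights above (illustrative only,
 since the exact GIMPLE, and hence the totals, may differ): for

     int f (int *p, int n) { return *p / n + g (n); }

 the division contributes div_mod_cost (1 under eni_size_weights, 10
 under eni_time_weights), the call to g contributes call_cost plus one
 estimate_move_cost unit per argument, the addition contributes 1, and
 loads into registers and register-to-register copies are free. */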
2989
726a989a
RB
2990/* Estimate the number of instructions in a gimple_seq. */
2991
2992int
2993count_insns_seq (gimple_seq seq, eni_weights *weights)
2994{
2995 gimple_stmt_iterator gsi;
2996 int n = 0;
2997 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
2998 n += estimate_num_insns (gsi_stmt (gsi), weights);
2999
3000 return n;
3001}
3002
3003
e21aff8a 3004/* Install new lexical TREE_BLOCK underneath 'current_block'. */
726a989a 3005
e21aff8a
SB
3006static void
3007add_lexical_block (tree current_block, tree new_block)
3008{
3009 tree *blk_p;
3010
3011 /* Walk to the last sub-block. */
3012 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
3013 *blk_p;
87caf699 3014 blk_p = &BLOCK_CHAIN (*blk_p))
e21aff8a
SB
3015 ;
3016 *blk_p = new_block;
3017 BLOCK_SUPERCONTEXT (new_block) = current_block;
e21aff8a
SB
3018}
3019
3e293154
MJ
3020/* Fetch the callee declaration from the call graph edge going from NODE
3021 and associated with the call statement STMT. Return NULL_TREE if not found. */
3022static tree
726a989a 3023get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3e293154
MJ
3024{
3025 struct cgraph_edge *cs;
3026
3027 cs = cgraph_edge (node, stmt);
3028 if (cs)
3029 return cs->callee->decl;
3030
3031 return NULL_TREE;
3032}
3033
726a989a 3034/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 3035
e21aff8a 3036static bool
726a989a 3037expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
d4e4baa9 3038{
1ea193c2 3039 tree retvar, use_retvar;
d436bff8 3040 tree fn;
6be42dd4 3041 struct pointer_map_t *st;
110cfe1c 3042 tree return_slot;
7740f00d 3043 tree modify_dest;
6de9cd9a 3044 location_t saved_location;
e21aff8a 3045 struct cgraph_edge *cg_edge;
dc0bfe6a 3046 const char *reason;
e21aff8a
SB
3047 basic_block return_block;
3048 edge e;
726a989a 3049 gimple_stmt_iterator gsi, stmt_gsi;
e21aff8a 3050 bool successfully_inlined = FALSE;
4f6c2131 3051 bool purge_dead_abnormal_edges;
e21aff8a
SB
3052 tree t_step;
3053 tree var;
d4e4baa9 3054
6de9cd9a
DN
3055 /* Set input_location here so we get the right instantiation context
3056 if we call instantiate_decl from inlinable_function_p. */
3057 saved_location = input_location;
726a989a
RB
3058 if (gimple_has_location (stmt))
3059 input_location = gimple_location (stmt);
6de9cd9a 3060
d4e4baa9 3061 /* From here on, we're only interested in CALL_EXPRs. */
726a989a 3062 if (gimple_code (stmt) != GIMPLE_CALL)
6de9cd9a 3063 goto egress;
d4e4baa9
AO
3064
3065 /* First, see if we can figure out what function is being called.
3066 If we cannot, then there is no hope of inlining the function. */
726a989a 3067 fn = gimple_call_fndecl (stmt);
d4e4baa9 3068 if (!fn)
3e293154
MJ
3069 {
3070 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3071 if (!fn)
3072 goto egress;
3073 }
d4e4baa9 3074
b58b1157 3075 /* Turn forward declarations into real ones. */
d4d1ebc1 3076 fn = cgraph_node (fn)->decl;
b58b1157 3077
726a989a 3078 /* If FN is a declaration of a function in a nested scope that was
a1a0fd4e
AO
3079 globally declared inline, we don't set its DECL_INITIAL.
3080 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3081 C++ front-end uses it for cdtors to refer to their internal
3082 declarations, that are not real functions. Fortunately those
3083 don't have trees to be saved, so we can tell by checking their
726a989a
RB
3084 gimple_body. */
3085 if (!DECL_INITIAL (fn)
a1a0fd4e 3086 && DECL_ABSTRACT_ORIGIN (fn)
726a989a 3087 && gimple_body (DECL_ABSTRACT_ORIGIN (fn)))
a1a0fd4e
AO
3088 fn = DECL_ABSTRACT_ORIGIN (fn);
3089
18c6ada9
JH
3090 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3091 Kill this check once this is fixed. */
1b369fae 3092 if (!id->dst_node->analyzed)
6de9cd9a 3093 goto egress;
18c6ada9 3094
1b369fae 3095 cg_edge = cgraph_edge (id->dst_node, stmt);
18c6ada9
JH
3096
3097 /* Constant propagation on arguments done during previous inlining
3098 may create a new direct call. Produce an edge for it. */
e21aff8a 3099 if (!cg_edge)
18c6ada9
JH
3100 {
3101 struct cgraph_node *dest = cgraph_node (fn);
3102
6de9cd9a
DN
3103 /* We have a missing edge in the callgraph. This can happen when
3104 previous inlining turned an indirect call into a direct call by
3105 constant-propagating arguments. In all other cases we hit a bug
3106 (incorrect node sharing is the most common reason for missing edges). */
7e8b322a 3107 gcc_assert (dest->needed);
1b369fae 3108 cgraph_create_edge (id->dst_node, dest, stmt,
45a80bb9
JH
3109 bb->count, CGRAPH_FREQ_BASE,
3110 bb->loop_depth)->inline_failed
18c6ada9 3111 = N_("originally indirect function call not considered for inlining");
45a80bb9
JH
3112 if (dump_file)
3113 {
3114 fprintf (dump_file, "Created new direct edge to %s",
3115 cgraph_node_name (dest));
3116 }
6de9cd9a 3117 goto egress;
18c6ada9
JH
3118 }
3119
d4e4baa9
AO
3120 /* Don't try to inline functions that are not well-suited to
3121 inlining. */
e21aff8a 3122 if (!cgraph_inline_p (cg_edge, &reason))
a833faa5 3123 {
3e293154
MJ
3124 /* If this call was originally indirect, we do not want to emit any
3125 inlining related warnings or sorry messages because there are no
3126 guarantees regarding those. */
3127 if (cg_edge->indirect_call)
3128 goto egress;
3129
7fac66d4
JH
3130 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3131 /* Avoid warnings during early inline pass. */
7e8b322a 3132 && cgraph_global_info_ready)
2d327012 3133 {
dee15844 3134 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2d327012
JH
3135 sorry ("called from here");
3136 }
3137 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3138 && !DECL_IN_SYSTEM_HEADER (fn)
09ebcffa 3139 && strlen (reason)
d63db217
JH
3140 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3141 /* Avoid warnings during early inline pass. */
7e8b322a 3142 && cgraph_global_info_ready)
a833faa5 3143 {
dee15844
JM
3144 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3145 fn, reason);
3176a0c2 3146 warning (OPT_Winline, "called from here");
a833faa5 3147 }
6de9cd9a 3148 goto egress;
a833faa5 3149 }
ea99e0be 3150 fn = cg_edge->callee->decl;
d4e4baa9 3151
18c6ada9 3152#ifdef ENABLE_CHECKING
1b369fae 3153 if (cg_edge->callee->decl != id->dst_node->decl)
e21aff8a 3154 verify_cgraph_node (cg_edge->callee);
18c6ada9
JH
3155#endif
3156
e21aff8a 3157 /* We will be inlining this callee. */
e21aff8a
SB
3158 id->eh_region = lookup_stmt_eh_region (stmt);
3159
726a989a 3160 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
3161 e = split_block (bb, stmt);
3162 bb = e->src;
3163 return_block = e->dest;
3164 remove_edge (e);
3165
4f6c2131
EB
3166 /* split_block splits after the statement; work around this by
3167 moving the call into the second block manually. Not pretty,
3168 but seems easier than doing the CFG manipulation by hand
726a989a
RB
3169 when the GIMPLE_CALL is the last statement of BB. */
3170 stmt_gsi = gsi_last_bb (bb);
3171 gsi_remove (&stmt_gsi, false);
4f6c2131 3172
726a989a 3173 /* If the GIMPLE_CALL was the last statement of BB, it may have
4f6c2131
EB
3174 been the source of abnormal edges. In this case, schedule
3175 the removal of dead abnormal edges. */
726a989a
RB
3176 gsi = gsi_start_bb (return_block);
3177 if (gsi_end_p (gsi))
e21aff8a 3178 {
726a989a 3179 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 3180 purge_dead_abnormal_edges = true;
e21aff8a 3181 }
4f6c2131
EB
3182 else
3183 {
726a989a 3184 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
3185 purge_dead_abnormal_edges = false;
3186 }
3187
726a989a 3188 stmt_gsi = gsi_start_bb (return_block);
742a37d5 3189
d436bff8
AH
3190 /* Build a block containing code to initialize the arguments, the
3191 actual inline expansion of the body, and a label for the return
3192 statements within the function to jump to. The type of the
3193 statement expression is the return type of the function call. */
e21aff8a
SB
3194 id->block = make_node (BLOCK);
3195 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3e2844cb 3196 BLOCK_SOURCE_LOCATION (id->block) = input_location;
726a989a 3197 add_lexical_block (gimple_block (stmt), id->block);
e21aff8a 3198
d4e4baa9
AO
3199 /* Local declarations will be replaced by their equivalents in this
3200 map. */
3201 st = id->decl_map;
6be42dd4 3202 id->decl_map = pointer_map_create ();
d4e4baa9 3203
e21aff8a 3204 /* Record the function we are about to inline. */
1b369fae
RH
3205 id->src_fn = fn;
3206 id->src_node = cg_edge->callee;
110cfe1c 3207 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
726a989a 3208 id->gimple_call = stmt;
1b369fae 3209
3c8da8a5
AO
3210 gcc_assert (!id->src_cfun->after_inlining);
3211
045685a9 3212 id->entry_bb = bb;
726a989a 3213 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 3214
ea99e0be 3215 if (DECL_INITIAL (fn))
acb8f212
JH
3216 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3217
d4e4baa9
AO
3218 /* Return statements in the function body will be replaced by jumps
3219 to the return block. */
1e128c5f
GB
3220 gcc_assert (DECL_INITIAL (fn));
3221 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
23700f65 3222
726a989a 3223 /* Find the LHS to which the result of this call is assigned. */
110cfe1c 3224 return_slot = NULL;
726a989a 3225 if (gimple_call_lhs (stmt))
81bafd36 3226 {
726a989a 3227 modify_dest = gimple_call_lhs (stmt);
81bafd36
ILT
3228
3229 /* The function which we are inlining might not return a value,
3230 in which case we should issue a warning that the function
3231 does not return a value. In that case the optimizers will
3232 see that the variable to which the value is assigned was not
3233 initialized. We do not want to issue a warning about that
3234 uninitialized variable. */
3235 if (DECL_P (modify_dest))
3236 TREE_NO_WARNING (modify_dest) = 1;
726a989a
RB
3237
3238 if (gimple_call_return_slot_opt_p (stmt))
fa47911c 3239 {
110cfe1c 3240 return_slot = modify_dest;
fa47911c
JM
3241 modify_dest = NULL;
3242 }
81bafd36 3243 }
7740f00d
RH
3244 else
3245 modify_dest = NULL;
3246
1ea193c2
ILT
3247 /* If we are inlining a call to the C++ operator new, we don't want
3248 to use type based alias analysis on the return value. Otherwise
3249 we may get confused if the compiler sees that the inlined new
3250 function returns a pointer which was just deleted. See bug
3251 33407. */
3252 if (DECL_IS_OPERATOR_NEW (fn))
3253 {
3254 return_slot = NULL;
3255 modify_dest = NULL;
3256 }
3257
d4e4baa9 3258 /* Declare the return variable for the function. */
726a989a 3259 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
1ea193c2
ILT
3260
3261 if (DECL_IS_OPERATOR_NEW (fn))
3262 {
3263 gcc_assert (TREE_CODE (retvar) == VAR_DECL
3264 && POINTER_TYPE_P (TREE_TYPE (retvar)));
3265 DECL_NO_TBAA_P (retvar) = 1;
3266 }
d4e4baa9 3267
e21aff8a
SB
3268 /* This is it. Duplicate the callee body. Assume callee is
3269 pre-gimplified. Note that we must not alter the caller
3270 function in any way before this point, as this CALL_EXPR may be
3271 a self-referential call; if we're calling ourselves, we need to
3272 duplicate our body before altering anything. */
3273 copy_body (id, bb->count, bb->frequency, bb, return_block);
50aadcbc 3274
acb8f212 3275 /* Add local vars in this inlined callee to caller. */
cb91fab0 3276 t_step = id->src_cfun->local_decls;
acb8f212
JH
3277 for (; t_step; t_step = TREE_CHAIN (t_step))
3278 {
3279 var = TREE_VALUE (t_step);
3280 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
cb91fab0
JH
3281 cfun->local_decls = tree_cons (NULL_TREE, var,
3282 cfun->local_decls);
acb8f212 3283 else
cb91fab0
JH
3284 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3285 cfun->local_decls);
acb8f212
JH
3286 }
3287
d4e4baa9 3288 /* Clean up. */
6be42dd4 3289 pointer_map_destroy (id->decl_map);
d4e4baa9
AO
3290 id->decl_map = st;
3291
84936f6f 3292 /* If the inlined function returns a result that we care about,
726a989a
RB
3293 substitute the GIMPLE_CALL with an assignment of the return
3294 variable to the LHS of the call. That is, if STMT was
3295 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3296 if (use_retvar && gimple_call_lhs (stmt))
e21aff8a 3297 {
726a989a
RB
3298 gimple old_stmt = stmt;
3299 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3300 gsi_replace (&stmt_gsi, stmt, false);
110cfe1c
JH
3301 if (gimple_in_ssa_p (cfun))
3302 {
3303 update_stmt (stmt);
3304 mark_symbols_for_renaming (stmt);
3305 }
726a989a 3306 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
e21aff8a 3307 }
6de9cd9a 3308 else
110cfe1c 3309 {
726a989a
RB
3310 /* Handle the case of inlining a function with no return
3311 statement, which causes the return value to become undefined. */
3312 if (gimple_call_lhs (stmt)
3313 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
110cfe1c 3314 {
726a989a
RB
3315 tree name = gimple_call_lhs (stmt);
3316 tree var = SSA_NAME_VAR (name);
110cfe1c
JH
3317 tree def = gimple_default_def (cfun, var);
3318
110cfe1c
JH
3319 if (def)
3320 {
726a989a
RB
3321 /* If the variable is used undefined, make this name
3322 undefined via a move. */
3323 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3324 gsi_replace (&stmt_gsi, stmt, true);
110cfe1c
JH
3325 update_stmt (stmt);
3326 }
110cfe1c
JH
3327 else
3328 {
726a989a
RB
3329 /* Otherwise make this variable undefined. */
3330 gsi_remove (&stmt_gsi, true);
110cfe1c 3331 set_default_def (var, name);
726a989a 3332 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
110cfe1c
JH
3333 }
3334 }
3335 else
726a989a 3336 gsi_remove (&stmt_gsi, true);
110cfe1c 3337 }
d4e4baa9 3338
4f6c2131 3339 if (purge_dead_abnormal_edges)
726a989a 3340 gimple_purge_dead_abnormal_call_edges (return_block);
84936f6f 3341
e21aff8a
SB
3342 /* If the value of the new expression is ignored, that's OK. We
3343 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3344 the equivalent inlined version either. */
726a989a
RB
3345 if (is_gimple_assign (stmt))
3346 {
3347 gcc_assert (gimple_assign_single_p (stmt)
1a87cf0c 3348 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
726a989a
RB
3349 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3350 }
84936f6f 3351
1eb3331e
DB
3352 /* Output the inlining info for this abstract function, since it has been
3353 inlined. If we don't do this now, we can lose the information about the
3354 variables in the function when the blocks get blown away as soon as we
3355 remove the cgraph node. */
e21aff8a 3356 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
84936f6f 3357
e72fcfe8 3358 /* Update callgraph if needed. */
e21aff8a 3359 cgraph_remove_node (cg_edge->callee);
e72fcfe8 3360
e21aff8a 3361 id->block = NULL_TREE;
e21aff8a 3362 successfully_inlined = TRUE;
742a37d5 3363
6de9cd9a
DN
3364 egress:
3365 input_location = saved_location;
e21aff8a 3366 return successfully_inlined;
d4e4baa9 3367}
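/* Illustrative sketch (not part of this file): the statement-replacement
   idiom used by expand_call_inline above, isolated into a helper.  Given
   the iterator pointing at a GIMPLE_CALL with an LHS and the temporary
   USE_RETVAR holding the inlined return value, rewrite 'a = foo (...)'
   into 'a = USE_RETVAR'.  The helper name is hypothetical; every routine
   it calls appears in the code above.  */

static void
replace_call_with_retvar (gimple_stmt_iterator *gsi, tree use_retvar)
{
  gimple old_stmt = gsi_stmt (*gsi);
  gimple new_stmt = gimple_build_assign (gimple_call_lhs (old_stmt),
					 use_retvar);

  /* Passing false preserves EH information so that
     maybe_clean_or_replace_eh_stmt can decide what to discard.  */
  gsi_replace (gsi, new_stmt, false);
  if (gimple_in_ssa_p (cfun))
    {
      update_stmt (new_stmt);
      mark_symbols_for_renaming (new_stmt);
    }
  maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
}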
6de9cd9a 3368
e21aff8a
SB
3369 /* Expand calls to inline functions found in basic block BB, using ID.
3370 At this point calls appear only as "toplevel" GIMPLE_CALL statements,
726a989a 3371 possibly with an LHS, never nested inside other expressions.
e21aff8a
SB
3372 Return true as soon as a call has been expanded inline; BB has then
3373 been split, and its remaining statements belong to a new block. */
3374
3375static bool
1b369fae 3376gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
6de9cd9a 3377{
726a989a 3378 gimple_stmt_iterator gsi;
6de9cd9a 3379
726a989a 3380 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6de9cd9a 3381 {
726a989a 3382 gimple stmt = gsi_stmt (gsi);
e21aff8a 3383
726a989a
RB
3384 if (is_gimple_call (stmt)
3385 && expand_call_inline (bb, stmt, id))
3386 return true;
6de9cd9a 3387 }
726a989a 3388
e21aff8a 3389 return false;
6de9cd9a
DN
3390}
3391
726a989a 3392
b8a00a4d
JH
3393/* Walk all basic blocks created after FIRST and try to fold every statement
3394 in the STATEMENTS pointer set. */
726a989a 3395
b8a00a4d
JH
3396static void
3397fold_marked_statements (int first, struct pointer_set_t *statements)
3398{
726a989a 3399 for (; first < n_basic_blocks; first++)
b8a00a4d
JH
3400 if (BASIC_BLOCK (first))
3401 {
726a989a
RB
3402 gimple_stmt_iterator gsi;
3403
3404 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3405 !gsi_end_p (gsi);
3406 gsi_next (&gsi))
3407 if (pointer_set_contains (statements, gsi_stmt (gsi)))
9477eb38 3408 {
726a989a 3409 gimple old_stmt = gsi_stmt (gsi);
2bafad93 3410
726a989a 3411 if (fold_stmt (&gsi))
9477eb38 3412 {
726a989a
RB
3413 /* Re-read the statement from GSI as fold_stmt() may
3414 have changed it. */
3415 gimple new_stmt = gsi_stmt (gsi);
3416 update_stmt (new_stmt);
3417
3418 if (is_gimple_call (old_stmt))
3419 cgraph_update_edges_for_call_stmt (old_stmt, new_stmt);
3420
3421 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3422 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
9477eb38
JH
3423 }
3424 }
b8a00a4d
JH
3425 }
3426}
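/* Hedged sketch of the producer side of the set walked above: while
   statements are copied, anything that may now fold is recorded in
   ID->statements_to_fold.  The helper name is hypothetical;
   pointer_set_insert is part of the pointer-set API used in this file.  */

static void
mark_stmt_for_folding (copy_body_data *id, gimple stmt)
{
  if (id->statements_to_fold)
    pointer_set_insert (id->statements_to_fold, stmt);
}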
3427
1084e689
JH
3428/* Return true if BB has at least one abnormal outgoing edge. */
3429
3430static inline bool
3431has_abnormal_outgoing_edge_p (basic_block bb)
3432{
3433 edge e;
3434 edge_iterator ei;
3435
3436 FOR_EACH_EDGE (e, ei, bb->succs)
3437 if (e->flags & EDGE_ABNORMAL)
3438 return true;
3439
3440 return false;
3441}
3442
d4e4baa9
AO
3443/* Expand calls to inline functions in the body of FN. */
3444
873aa8f5 3445unsigned int
46c5ad27 3446optimize_inline_calls (tree fn)
d4e4baa9 3447{
1b369fae 3448 copy_body_data id;
d4e4baa9 3449 tree prev_fn;
e21aff8a 3450 basic_block bb;
b8a00a4d 3451 int last = n_basic_blocks;
d406b663
JJ
3452 struct gimplify_ctx gctx;
3453
c5b6f18e
MM
3454 /* There is no point in performing inlining if errors have already
3455 occurred -- and we might crash if we try to inline invalid
3456 code. */
3457 if (errorcount || sorrycount)
873aa8f5 3458 return 0;
c5b6f18e 3459
d4e4baa9
AO
3460 /* Clear out ID. */
3461 memset (&id, 0, sizeof (id));
3462
1b369fae
RH
3463 id.src_node = id.dst_node = cgraph_node (fn);
3464 id.dst_fn = fn;
d4e4baa9
AO
3465 /* Or any functions that aren't finished yet. */
3466 prev_fn = NULL_TREE;
3467 if (current_function_decl)
3468 {
1b369fae 3469 id.dst_fn = current_function_decl;
d4e4baa9
AO
3470 prev_fn = current_function_decl;
3471 }
1b369fae
RH
3472
3473 id.copy_decl = copy_decl_maybe_to_var;
3474 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3475 id.transform_new_cfg = false;
3476 id.transform_return_to_modify = true;
9ff420f1 3477 id.transform_lang_insert_block = NULL;
b8a00a4d 3478 id.statements_to_fold = pointer_set_create ();
1b369fae 3479
d406b663 3480 push_gimplify_context (&gctx);
d4e4baa9 3481
672987e8
ZD
3482 /* We make no attempts to keep dominance info up-to-date. */
3483 free_dominance_info (CDI_DOMINATORS);
3484 free_dominance_info (CDI_POST_DOMINATORS);
3485
726a989a
RB
3486 /* Register specific gimple functions. */
3487 gimple_register_cfg_hooks ();
3488
e21aff8a
SB
3489 /* Reach the trees by walking over the CFG, and note the
3490 enclosing basic-blocks in the call edges. */
3491 /* We walk the blocks going forward, because inlined function bodies
3492 will split id->current_basic_block, and the new blocks will
3493 follow it; we'll trudge through them, processing their CALL_EXPRs
3494 along the way. */
3495 FOR_EACH_BB (bb)
3496 gimple_expand_calls_inline (bb, &id);
d4e4baa9 3497
e21aff8a 3498 pop_gimplify_context (NULL);
6de9cd9a 3499
18c6ada9
JH
3500#ifdef ENABLE_CHECKING
3501 {
3502 struct cgraph_edge *e;
3503
1b369fae 3504 verify_cgraph_node (id.dst_node);
18c6ada9
JH
3505
3506 /* Double check that we inlined everything we are supposed to inline. */
1b369fae 3507 for (e = id.dst_node->callees; e; e = e->next_callee)
1e128c5f 3508 gcc_assert (e->inline_failed);
18c6ada9
JH
3509 }
3510#endif
a9eafe81
AP
3511
3512 /* Fold the statements before compacting/renumbering the basic blocks. */
3513 fold_marked_statements (last, id.statements_to_fold);
3514 pointer_set_destroy (id.statements_to_fold);
3515
3516 /* Renumber the (code) basic_blocks consecutively. */
3517 compact_blocks ();
3518 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3519 number_blocks (fn);
b8a00a4d 3520
873aa8f5
JH
3521 /* We are not going to keep the cgraph edges up to date.
3522 Kill them so they won't confuse us. */
3523 cgraph_node_remove_callees (id.dst_node);
3524
873aa8f5 3525 fold_cond_expr_cond ();
726a989a 3526
110cfe1c
JH
3527 /* It would be nice to check SSA/CFG/statement consistency here, but it is
3528 not possible yet: the IPA passes may mark various functions as no longer
3529 throwing, without proactively updating the local EH info.  This is done
3530 later in the fixup_cfg pass, which also executes the verification. */
726a989a
RB
3531 return (TODO_update_ssa
3532 | TODO_cleanup_cfg
45a80bb9
JH
3533 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
3534 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
d4e4baa9
AO
3535}
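/* Hypothetical driver sketch: how a pass-style wrapper might invoke
   optimize_inline_calls on the current function and hand the returned
   TODO flags back to the pass manager.  The wrapper name is an
   assumption; only optimize_inline_calls and current_function_decl come
   from this file.  */

static unsigned int
execute_expand_inline_calls (void)
{
  /* The returned TODO_* flags request the SSA update and CFG cleanup
     described at the end of optimize_inline_calls.  */
  return optimize_inline_calls (current_function_decl);
}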
3536
d4e4baa9
AO
3537/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
3538
3539tree
46c5ad27 3540copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
d4e4baa9
AO
3541{
3542 enum tree_code code = TREE_CODE (*tp);
07beea0d 3543 enum tree_code_class cl = TREE_CODE_CLASS (code);
d4e4baa9
AO
3544
3545 /* We make copies of most nodes. */
07beea0d 3546 if (IS_EXPR_CODE_CLASS (cl)
d4e4baa9
AO
3547 || code == TREE_LIST
3548 || code == TREE_VEC
8843c120
DN
3549 || code == TYPE_DECL
3550 || code == OMP_CLAUSE)
d4e4baa9
AO
3551 {
3552 /* Because the chain gets clobbered when we make a copy, we save it
3553 here. */
82d6e6fc 3554 tree chain = NULL_TREE, new_tree;
07beea0d 3555
726a989a 3556 chain = TREE_CHAIN (*tp);
d4e4baa9
AO
3557
3558 /* Copy the node. */
82d6e6fc 3559 new_tree = copy_node (*tp);
6de9cd9a
DN
3560
3561 /* Propagate mudflap marked-ness. */
3562 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 3563 mf_mark (new_tree);
6de9cd9a 3564
82d6e6fc 3565 *tp = new_tree;
d4e4baa9
AO
3566
3567 /* Now, restore the chain, if appropriate. That will cause
3568 walk_tree to walk into the chain as well. */
50674e96
DN
3569 if (code == PARM_DECL
3570 || code == TREE_LIST
aaf46ef9 3571 || code == OMP_CLAUSE)
d4e4baa9
AO
3572 TREE_CHAIN (*tp) = chain;
3573
3574 /* For now, we don't update BLOCKs when we make copies. So, we
6de9cd9a
DN
3575 have to nullify all BIND_EXPRs. */
3576 if (TREE_CODE (*tp) == BIND_EXPR)
3577 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
d4e4baa9 3578 }
4038c495
GB
3579 else if (code == CONSTRUCTOR)
3580 {
3581 /* CONSTRUCTOR nodes need special handling because
3582 we need to duplicate the vector of elements. */
82d6e6fc 3583 tree new_tree;
4038c495 3584
82d6e6fc 3585 new_tree = copy_node (*tp);
4038c495
GB
3586
3587 /* Propagate mudflap marked-ness. */
3588 if (flag_mudflap && mf_marked_p (*tp))
82d6e6fc 3589 mf_mark (new_tree);
9f63daea 3590
82d6e6fc 3591 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4038c495 3592 CONSTRUCTOR_ELTS (*tp));
82d6e6fc 3593 *tp = new_tree;
4038c495 3594 }
6615c446 3595 else if (TREE_CODE_CLASS (code) == tcc_type)
d4e4baa9 3596 *walk_subtrees = 0;
6615c446 3597 else if (TREE_CODE_CLASS (code) == tcc_declaration)
6de9cd9a 3598 *walk_subtrees = 0;
a396f8ae
GK
3599 else if (TREE_CODE_CLASS (code) == tcc_constant)
3600 *walk_subtrees = 0;
1e128c5f
GB
3601 else
3602 gcc_assert (code != STATEMENT_LIST);
d4e4baa9
AO
3603 return NULL_TREE;
3604}
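/* Usage sketch: copy_tree_r is designed to be driven by walk_tree.  A
   minimal hedged example that deep-copies an expression in place; the
   wrapper name is hypothetical, walk_tree and copy_tree_r are real.  */

static tree
deep_copy_expr (tree expr)
{
  /* copy_tree_r replaces each visited node through *TP, so the walk
     rewrites EXPR into a fresh tree as it descends.  */
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}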
3605
3606/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
aa4a53af 3607 information indicating to what new SAVE_EXPR this one should be mapped,
e21aff8a
SB
3608 use that one.  Otherwise, create a new node and enter it in ST, so that
3609 later occurrences of this SAVE_EXPR map to the same copy. */
d4e4baa9 3610
892c7e1e 3611static void
82c82743 3612remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
d4e4baa9 3613{
6be42dd4
RG
3614 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3615 tree *n;
5e20bdd7 3616 tree t;
d4e4baa9
AO
3617
3618 /* See if we already encountered this SAVE_EXPR. */
6be42dd4 3619 n = (tree *) pointer_map_contains (st, *tp);
d92b4486 3620
d4e4baa9
AO
3621 /* If we didn't already remap this SAVE_EXPR, do so now. */
3622 if (!n)
3623 {
5e20bdd7 3624 t = copy_node (*tp);
d4e4baa9 3625
d4e4baa9 3626 /* Remember this SAVE_EXPR. */
6be42dd4 3627 *pointer_map_insert (st, *tp) = t;
350ebd54 3628 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
6be42dd4 3629 *pointer_map_insert (st, t) = t;
d4e4baa9
AO
3630 }
3631 else
5e20bdd7
JZ
3632 {
3633 /* We've already walked into this SAVE_EXPR; don't do it again. */
3634 *walk_subtrees = 0;
6be42dd4 3635 t = *n;
5e20bdd7 3636 }
d4e4baa9
AO
3637
3638 /* Replace this SAVE_EXPR with the copy. */
5e20bdd7 3639 *tp = t;
d4e4baa9 3640}
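/* Minimal sketch of the map idiom above, assuming only the pointer-map
   API already used in this file: look a key up, and record a replacement
   the first time the key is seen.  The helper name is hypothetical.  */

static tree
lookup_or_record (struct pointer_map_t *st, tree key, tree replacement)
{
  tree *n = (tree *) pointer_map_contains (st, key);

  if (n)
    return *n;	/* Already remapped; reuse the existing copy.  */

  *pointer_map_insert (st, key) = replacement;
  return replacement;
}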
d436bff8 3641
aa4a53af
RK
3642 /* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
3643 copies the declaration and enters it in the decl map in DATA (which is
1b369fae 3644 really a `copy_body_data *'). */
6de9cd9a
DN
3645
3646static tree
3647mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3648 void *data)
3649{
1b369fae 3650 copy_body_data *id = (copy_body_data *) data;
6de9cd9a
DN
3651
3652 /* Don't walk into types. */
350fae66
RK
3653 if (TYPE_P (*tp))
3654 *walk_subtrees = 0;
6de9cd9a 3655
350fae66 3656 else if (TREE_CODE (*tp) == LABEL_EXPR)
6de9cd9a 3657 {
350fae66 3658 tree decl = TREE_OPERAND (*tp, 0);
6de9cd9a 3659
350fae66 3660 /* Copy the decl and remember the copy. */
1b369fae 3661 insert_decl_map (id, decl, id->copy_decl (decl, id));
6de9cd9a
DN
3662 }
3663
3664 return NULL_TREE;
3665}
3666
19114537
EC
3667/* Perform any modifications to EXPR required when it is unsaved. Does
3668 not recurse into EXPR's subtrees. */
3669
3670static void
3671unsave_expr_1 (tree expr)
3672{
3673 switch (TREE_CODE (expr))
3674 {
3675 case TARGET_EXPR:
3676 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3677 It's OK for this to happen if it was part of a subtree that
3678 isn't immediately expanded, such as operand 2 of another
3679 TARGET_EXPR. */
3680 if (TREE_OPERAND (expr, 1))
3681 break;
3682
3683 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3684 TREE_OPERAND (expr, 3) = NULL_TREE;
3685 break;
3686
3687 default:
3688 break;
3689 }
3690}
3691
6de9cd9a
DN
3692/* Called via walk_tree when an expression is unsaved. Using the
3693 pointer map pointed to by ST (which is really a `pointer_map_t *'),
3694 remaps all local declarations to appropriate replacements. */
d436bff8
AH
3695
3696static tree
6de9cd9a 3697unsave_r (tree *tp, int *walk_subtrees, void *data)
d436bff8 3698{
1b369fae 3699 copy_body_data *id = (copy_body_data *) data;
6be42dd4
RG
3700 struct pointer_map_t *st = id->decl_map;
3701 tree *n;
6de9cd9a
DN
3702
3703 /* Only a local declaration (variable or label). */
3704 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3705 || TREE_CODE (*tp) == LABEL_DECL)
3706 {
3707 /* Lookup the declaration. */
6be42dd4 3708 n = (tree *) pointer_map_contains (st, *tp);
9f63daea 3709
6de9cd9a
DN
3710 /* If it's there, remap it. */
3711 if (n)
6be42dd4 3712 *tp = *n;
6de9cd9a 3713 }
aa4a53af 3714
6de9cd9a 3715 else if (TREE_CODE (*tp) == STATEMENT_LIST)
726a989a 3716 gcc_unreachable ();
6de9cd9a
DN
3717 else if (TREE_CODE (*tp) == BIND_EXPR)
3718 copy_bind_expr (tp, walk_subtrees, id);
3719 else if (TREE_CODE (*tp) == SAVE_EXPR)
82c82743 3720 remap_save_expr (tp, st, walk_subtrees);
d436bff8 3721 else
6de9cd9a
DN
3722 {
3723 copy_tree_r (tp, walk_subtrees, NULL);
3724
3725 /* Do whatever unsaving is required. */
3726 unsave_expr_1 (*tp);
3727 }
3728
3729 /* Keep iterating. */
3730 return NULL_TREE;
d436bff8
AH
3731}
3732
19114537
EC
3733/* Copies everything in EXPR and replaces variables, labels
3734 and SAVE_EXPRs local to EXPR. */
6de9cd9a
DN
3735
3736tree
19114537 3737unsave_expr_now (tree expr)
6de9cd9a 3738{
1b369fae 3739 copy_body_data id;
6de9cd9a
DN
3740
3741 /* There's nothing to do for NULL_TREE. */
3742 if (expr == 0)
3743 return expr;
3744
3745 /* Set up ID. */
3746 memset (&id, 0, sizeof (id));
1b369fae
RH
3747 id.src_fn = current_function_decl;
3748 id.dst_fn = current_function_decl;
6be42dd4 3749 id.decl_map = pointer_map_create ();
6de9cd9a 3750
1b369fae
RH
3751 id.copy_decl = copy_decl_no_change;
3752 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3753 id.transform_new_cfg = false;
3754 id.transform_return_to_modify = false;
9ff420f1 3755 id.transform_lang_insert_block = NULL;
1b369fae 3756
6de9cd9a
DN
3757 /* Walk the tree once to find local labels. */
3758 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3759
3760 /* Walk the tree again, copying, remapping, and unsaving. */
3761 walk_tree (&expr, unsave_r, &id, NULL);
3762
3763 /* Clean up. */
6be42dd4 3764 pointer_map_destroy (id.decl_map);
6de9cd9a
DN
3765
3766 return expr;
3767}
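/* Hypothetical caller sketch: duplicate an expression before editing it
   so the original stays intact.  Only unsave_expr_now comes from this
   file; the wrapper name is an assumption.  */

static tree
duplicate_for_editing (tree expr)
{
  /* The copy gets fresh local variables, labels and SAVE_EXPRs, so
     rewriting it cannot clobber state shared with the original.  */
  return unsave_expr_now (expr);
}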
3768
726a989a
RB
3769/* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
3770 label, copies the declaration and enters it in the decl map in DATA (which
3771 is really a 'copy_body_data *'). */
3772
3773static tree
3774mark_local_labels_stmt (gimple_stmt_iterator *gsip,
3775 bool *handled_ops_p ATTRIBUTE_UNUSED,
3776 struct walk_stmt_info *wi)
3777{
3778 copy_body_data *id = (copy_body_data *) wi->info;
3779 gimple stmt = gsi_stmt (*gsip);
3780
3781 if (gimple_code (stmt) == GIMPLE_LABEL)
3782 {
3783 tree decl = gimple_label_label (stmt);
3784
3785 /* Copy the decl and remember the copy. */
3786 insert_decl_map (id, decl, id->copy_decl (decl, id));
3787 }
3788
3789 return NULL_TREE;
3790}
3791
3792
3793 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
3794 Using the pointer map pointed to by ST (which is really a `pointer_map_t *'),
3795 remaps all local declarations to appropriate replacements in gimple
3796 operands. */
3797
3798static tree
3799replace_locals_op (tree *tp, int *walk_subtrees, void *data)
3800{
3801 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
3802 copy_body_data *id = (copy_body_data *) wi->info;
3803 struct pointer_map_t *st = id->decl_map;
3804 tree *n;
3805 tree expr = *tp;
3806
3807 /* Only a local declaration (variable or label). */
3808 if ((TREE_CODE (expr) == VAR_DECL
3809 && !TREE_STATIC (expr))
3810 || TREE_CODE (expr) == LABEL_DECL)
3811 {
3812 /* Lookup the declaration. */
3813 n = (tree *) pointer_map_contains (st, expr);
3814
3815 /* If it's there, remap it. */
3816 if (n)
3817 *tp = *n;
3818 *walk_subtrees = 0;
3819 }
3820 else if (TREE_CODE (expr) == STATEMENT_LIST
3821 || TREE_CODE (expr) == BIND_EXPR
3822 || TREE_CODE (expr) == SAVE_EXPR)
3823 gcc_unreachable ();
3824 else if (TREE_CODE (expr) == TARGET_EXPR)
3825 {
3826 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3827 It's OK for this to happen if it was part of a subtree that
3828 isn't immediately expanded, such as operand 2 of another
3829 TARGET_EXPR. */
3830 if (!TREE_OPERAND (expr, 1))
3831 {
3832 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3833 TREE_OPERAND (expr, 3) = NULL_TREE;
3834 }
3835 }
3836
3837 /* Keep iterating. */
3838 return NULL_TREE;
3839}
3840
3841
3842 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
3843 Using the pointer map pointed to by ST (which is really a `pointer_map_t *'),
3844 remaps all local declarations to appropriate replacements in gimple
3845 statements. */
3846
3847static tree
3848replace_locals_stmt (gimple_stmt_iterator *gsip,
3849 bool *handled_ops_p ATTRIBUTE_UNUSED,
3850 struct walk_stmt_info *wi)
3851{
3852 copy_body_data *id = (copy_body_data *) wi->info;
3853 gimple stmt = gsi_stmt (*gsip);
3854
3855 if (gimple_code (stmt) == GIMPLE_BIND)
3856 {
3857 tree block = gimple_bind_block (stmt);
3858
3859 if (block)
3860 {
3861 remap_block (&block, id);
3862 gimple_bind_set_block (stmt, block);
3863 }
3864
3865 /* This will remap a lot of the same decls again, but this should be
3866 harmless. */
3867 if (gimple_bind_vars (stmt))
3868 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), id));
3869 }
3870
3871 /* Keep iterating. */
3872 return NULL_TREE;
3873}
3874
3875
3876/* Copies everything in SEQ and replaces variables and labels local to
3877 current_function_decl. */
3878
3879gimple_seq
3880copy_gimple_seq_and_replace_locals (gimple_seq seq)
3881{
3882 copy_body_data id;
3883 struct walk_stmt_info wi;
3884 struct pointer_set_t *visited;
3885 gimple_seq copy;
3886
3887 /* There's nothing to do for an empty sequence. */
3888 if (seq == NULL)
3889 return seq;
3890
3891 /* Set up ID. */
3892 memset (&id, 0, sizeof (id));
3893 id.src_fn = current_function_decl;
3894 id.dst_fn = current_function_decl;
3895 id.decl_map = pointer_map_create ();
3896
3897 id.copy_decl = copy_decl_no_change;
3898 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3899 id.transform_new_cfg = false;
3900 id.transform_return_to_modify = false;
3901 id.transform_lang_insert_block = NULL;
3902
3903 /* Walk the tree once to find local labels. */
3904 memset (&wi, 0, sizeof (wi));
3905 visited = pointer_set_create ();
3906 wi.info = &id;
3907 wi.pset = visited;
3908 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
3909 pointer_set_destroy (visited);
3910
3911 copy = gimple_seq_copy (seq);
3912
3913 /* Walk the copy, remapping decls. */
3914 memset (&wi, 0, sizeof (wi));
3915 wi.info = &id;
3916 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
3917
3918 /* Clean up. */
3919 pointer_map_destroy (id.decl_map);
3920
3921 return copy;
3922}
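/* Hypothetical usage sketch: duplicating the body of a GIMPLE_BIND so
   that the copy's locals do not alias the original's.  gimple_bind_body
   is assumed from the gimple API this file already includes; the wrapper
   name is made up.  */

static gimple_seq
copy_bind_body (gimple bind_stmt)
{
  return copy_gimple_seq_and_replace_locals (gimple_bind_body (bind_stmt));
}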
3923
3924
6de9cd9a 3925/* Allow someone to determine if SEARCH is a child of TOP from gdb. */
aa4a53af 3926
6de9cd9a
DN
3927static tree
3928debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3929{
3930 if (*tp == data)
3931 return (tree) data;
3932 else
3933 return NULL;
3934}
3935
6de9cd9a
DN
3936bool
3937debug_find_tree (tree top, tree search)
3938{
3939 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
3940}
3941
e21aff8a 3942
6de9cd9a
DN
3943 /* Declare the variables created by the inliner.  Add all the variables in
3944 VARS to BLOCK. */
3945
3946static void
e21aff8a 3947declare_inline_vars (tree block, tree vars)
6de9cd9a 3948{
84936f6f
RH
3949 tree t;
3950 for (t = vars; t; t = TREE_CHAIN (t))
9659ce8b
JH
3951 {
3952 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3953 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
cb91fab0 3954 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
9659ce8b 3955 }
6de9cd9a 3956
e21aff8a
SB
3957 if (block)
3958 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3959}
3960
19734dd8 3961 /* Finish up COPY, a copy of DECL.  The original DECL was in ID->src_fn;
1b369fae
RH
3962 the copy will live in ID->dst_fn.  Set up the copy's debug information,
3963 abstract origin, RTL and DECL_CONTEXT accordingly. */
19734dd8 3964
1b369fae
RH
3965static tree
3966copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
19734dd8 3967{
19734dd8
RL
3968 /* Don't generate debug information for the copy if we wouldn't have
3969 generated it for the original either. */
3970 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3971 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3972
3973 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3974 declaration inspired this copy. */
3975 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3976
3977 /* The new variable/label has no RTL, yet. */
68a976f2
RL
3978 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3979 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
19734dd8
RL
3980 SET_DECL_RTL (copy, NULL_RTX);
3981
3982 /* These args would always appear unused, if not for this. */
3983 TREE_USED (copy) = 1;
3984
3985 /* Set the context for the new declaration. */
3986 if (!DECL_CONTEXT (decl))
3987 /* Globals stay global. */
3988 ;
1b369fae 3989 else if (DECL_CONTEXT (decl) != id->src_fn)
19734dd8
RL
3990 /* Things that weren't in the scope of the function we're inlining
3991 from aren't in the scope we're inlining to, either. */
3992 ;
3993 else if (TREE_STATIC (decl))
3994 /* Function-scoped static variables should stay in the original
3995 function. */
3996 ;
3997 else
3998 /* Ordinary automatic local variables are now in the scope of the
3999 new function. */
1b369fae 4000 DECL_CONTEXT (copy) = id->dst_fn;
19734dd8
RL
4001
4002 return copy;
4003}
4004
1b369fae
RH
4005static tree
4006copy_decl_to_var (tree decl, copy_body_data *id)
4007{
4008 tree copy, type;
4009
4010 gcc_assert (TREE_CODE (decl) == PARM_DECL
4011 || TREE_CODE (decl) == RESULT_DECL);
4012
4013 type = TREE_TYPE (decl);
4014
4015 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
4016 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4017 TREE_READONLY (copy) = TREE_READONLY (decl);
4018 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
0890b981 4019 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
058dcc25 4020 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
1b369fae
RH
4021
4022 return copy_decl_for_dup_finish (id, decl, copy);
4023}
4024
c08cd4c1
JM
4025/* Like copy_decl_to_var, but create a return slot object instead of a
4026 pointer variable for return by invisible reference. */
4027
4028static tree
4029copy_result_decl_to_var (tree decl, copy_body_data *id)
4030{
4031 tree copy, type;
4032
4033 gcc_assert (TREE_CODE (decl) == PARM_DECL
4034 || TREE_CODE (decl) == RESULT_DECL);
4035
4036 type = TREE_TYPE (decl);
4037 if (DECL_BY_REFERENCE (decl))
4038 type = TREE_TYPE (type);
4039
4040 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
4041 TREE_READONLY (copy) = TREE_READONLY (decl);
4042 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4043 if (!DECL_BY_REFERENCE (decl))
4044 {
4045 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
0890b981 4046 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
058dcc25 4047 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
c08cd4c1
JM
4048 }
4049
4050 return copy_decl_for_dup_finish (id, decl, copy);
4051}
4052
9ff420f1 4053tree
1b369fae
RH
4054copy_decl_no_change (tree decl, copy_body_data *id)
4055{
4056 tree copy;
4057
4058 copy = copy_node (decl);
4059
4060 /* The COPY is not abstract; it will be generated in DST_FN. */
4061 DECL_ABSTRACT (copy) = 0;
4062 lang_hooks.dup_lang_specific_decl (copy);
4063
4064 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4065 been taken; it's for internal bookkeeping in expand_goto_internal. */
4066 if (TREE_CODE (copy) == LABEL_DECL)
4067 {
4068 TREE_ADDRESSABLE (copy) = 0;
4069 LABEL_DECL_UID (copy) = -1;
4070 }
4071
4072 return copy_decl_for_dup_finish (id, decl, copy);
4073}
4074
4075static tree
4076copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4077{
4078 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4079 return copy_decl_to_var (decl, id);
4080 else
4081 return copy_decl_no_change (decl, id);
4082}
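/* Hedged configuration sketch: the copy_decl hooks above are installed
   in the copy_body_data descriptor before copying starts, and the choice
   of hook decides whether PARM_DECLs and RESULT_DECLs degrade to
   VAR_DECLs.  The helper name is hypothetical; the field assignments
   mirror the initializations done elsewhere in this file.  */

static void
init_id_for_inlining (copy_body_data *id, tree src_fn, tree dst_fn)
{
  memset (id, 0, sizeof (*id));
  id->src_fn = src_fn;
  id->dst_fn = dst_fn;
  id->decl_map = pointer_map_create ();
  /* Inlining turns PARM_DECLs and RESULT_DECLs into local VAR_DECLs;
     copy_decl_no_change would keep every decl's kind unchanged.  */
  id->copy_decl = copy_decl_maybe_to_var;
}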
4083
19734dd8
RL
4084/* Return a copy of the function's argument tree. */
4085static tree
1b369fae 4086copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
19734dd8
RL
4087{
4088 tree *arg_copy, *parg;
4089
4090 arg_copy = &orig_parm;
4091 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
4092 {
82d6e6fc
KG
4093 tree new_tree = remap_decl (*parg, id);
4094 lang_hooks.dup_lang_specific_decl (new_tree);
4095 TREE_CHAIN (new_tree) = TREE_CHAIN (*parg);
4096 *parg = new_tree;
19734dd8
RL
4097 }
4098 return orig_parm;
4099}
4100
4101/* Return a copy of the function's static chain. */
4102static tree
1b369fae 4103copy_static_chain (tree static_chain, copy_body_data * id)
19734dd8
RL
4104{
4105 tree *chain_copy, *pvar;
4106
4107 chain_copy = &static_chain;
4108 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4109 {
82d6e6fc
KG
4110 tree new_tree = remap_decl (*pvar, id);
4111 lang_hooks.dup_lang_specific_decl (new_tree);
4112 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4113 *pvar = new_tree;
19734dd8
RL
4114 }
4115 return static_chain;
4116}
4117
4118/* Return true if the function is allowed to be versioned.
4119 This is a guard for the versioning functionality. */
4120bool
4121tree_versionable_function_p (tree fndecl)
4122{
4123 if (fndecl == NULL_TREE)
4124 return false;
4125 /* ??? There are cases where a function is
4126 uninlinable but can be versioned. */
4127 if (!tree_inlinable_function_p (fndecl))
4128 return false;
4129
4130 return true;
4131}
4132
4133/* Create a copy of a function's tree.
4134 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4135 of the original function and the new copied function
4136 respectively. In case we want to replace a DECL
4137 tree with another tree while duplicating the function's
4138 body, TREE_MAP represents the mapping between these
ea99e0be
JH
4139 trees. If UPDATE_CLONES is set, the call_stmt fields
4140 of edges of clones of the function will be updated. */
19734dd8 4141void
ea99e0be
JH
4142tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
4143 bool update_clones)
19734dd8
RL
4144{
4145 struct cgraph_node *old_version_node;
4146 struct cgraph_node *new_version_node;
1b369fae 4147 copy_body_data id;
110cfe1c 4148 tree p;
19734dd8
RL
4149 unsigned i;
4150 struct ipa_replace_map *replace_info;
4151 basic_block old_entry_block;
4152 tree t_step;
873aa8f5 4153 tree old_current_function_decl = current_function_decl;
19734dd8
RL
4154
4155 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4156 && TREE_CODE (new_decl) == FUNCTION_DECL);
4157 DECL_POSSIBLY_INLINED (old_decl) = 1;
4158
4159 old_version_node = cgraph_node (old_decl);
4160 new_version_node = cgraph_node (new_decl);
4161
19734dd8
RL
4162 DECL_ARTIFICIAL (new_decl) = 1;
4163 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4164
3d283195
JH
4165 /* Prepare the data structures for the tree copy. */
4166 memset (&id, 0, sizeof (id));
4167
19734dd8 4168 /* Generate a new name for the new version. */
ea99e0be 4169 if (!update_clones)
19734dd8 4170 {
95c8e172
RL
4171 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
4172 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
4173 SET_DECL_RTL (new_decl, NULL_RTX);
3d283195 4174 id.statements_to_fold = pointer_set_create ();
19734dd8 4175 }
19734dd8 4176
6be42dd4 4177 id.decl_map = pointer_map_create ();
1b369fae
RH
4178 id.src_fn = old_decl;
4179 id.dst_fn = new_decl;
4180 id.src_node = old_version_node;
4181 id.dst_node = new_version_node;
4182 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
19734dd8 4183
1b369fae
RH
4184 id.copy_decl = copy_decl_no_change;
4185 id.transform_call_graph_edges
4186 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4187 id.transform_new_cfg = true;
4188 id.transform_return_to_modify = false;
9ff420f1 4189 id.transform_lang_insert_block = NULL;
1b369fae 4190
19734dd8 4191 current_function_decl = new_decl;
110cfe1c
JH
4192 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4193 (DECL_STRUCT_FUNCTION (old_decl));
4194 initialize_cfun (new_decl, old_decl,
4195 old_entry_block->count,
4196 old_entry_block->frequency);
4197 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
19734dd8
RL
4198
4199 /* Copy the function's static chain. */
4200 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4201 if (p)
4202 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4203 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4204 &id);
4205 /* Copy the function's arguments. */
4206 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4207 DECL_ARGUMENTS (new_decl) =
4208 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
4209
4210 /* If there's a tree_map, prepare for substitution. */
4211 if (tree_map)
4212 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
4213 {
726a989a
RB
4214 replace_info
4215 = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i);
1b369fae 4216 if (replace_info->replace_p)
00fc2333
JH
4217 {
4218 if (TREE_CODE (replace_info->new_tree) == ADDR_EXPR)
4219 {
4220 tree op = TREE_OPERAND (replace_info->new_tree, 0);
4221 while (handled_component_p (op))
4222 op = TREE_OPERAND (op, 0);
4223 if (TREE_CODE (op) == VAR_DECL)
4224 add_referenced_var (op);
4225 }
4226 insert_decl_map (&id, replace_info->old_tree,
4227 replace_info->new_tree);
4228 }
19734dd8
RL
4229 }
4230
1b369fae 4231 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
19734dd8
RL
4232
4233 /* Renumber the lexical scoping (non-code) blocks consecutively. */
1b369fae 4234 number_blocks (id.dst_fn);
19734dd8 4235
cb91fab0 4236 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
19734dd8 4237 /* Add local vars. */
cb91fab0 4238 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
19734dd8
RL
4239 t_step; t_step = TREE_CHAIN (t_step))
4240 {
4241 tree var = TREE_VALUE (t_step);
4242 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
cb91fab0 4243 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
19734dd8 4244 else
cb91fab0 4245 cfun->local_decls =
19734dd8 4246 tree_cons (NULL_TREE, remap_decl (var, &id),
cb91fab0 4247 cfun->local_decls);
19734dd8
RL
4248 }
4249
4250 /* Copy the Function's body. */
110cfe1c 4251 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
19734dd8 4252
19734dd8
RL
4253 if (DECL_RESULT (old_decl) != NULL_TREE)
4254 {
4255 tree *res_decl = &DECL_RESULT (old_decl);
4256 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4257 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4258 }
4259
19734dd8
RL
4260 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4261 number_blocks (new_decl);
4262
4263 /* Clean up. */
6be42dd4 4264 pointer_map_destroy (id.decl_map);
3d283195
JH
4265 if (!update_clones)
4266 {
4267 fold_marked_statements (0, id.statements_to_fold);
4268 pointer_set_destroy (id.statements_to_fold);
4269 fold_cond_expr_cond ();
4270 }
110cfe1c
JH
4271 if (gimple_in_ssa_p (cfun))
4272 {
3e87758a
RL
4273 free_dominance_info (CDI_DOMINATORS);
4274 free_dominance_info (CDI_POST_DOMINATORS);
3d283195
JH
4275 if (!update_clones)
4276 delete_unreachable_blocks ();
110cfe1c 4277 update_ssa (TODO_update_ssa);
3d283195
JH
4278 if (!update_clones)
4279 {
4280 fold_cond_expr_cond ();
4281 if (need_ssa_update_p ())
4282 update_ssa (TODO_update_ssa);
4283 }
110cfe1c
JH
4284 }
4285 free_dominance_info (CDI_DOMINATORS);
4286 free_dominance_info (CDI_POST_DOMINATORS);
4287 pop_cfun ();
873aa8f5
JH
4288 current_function_decl = old_current_function_decl;
4289 gcc_assert (!current_function_decl
4290 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
19734dd8
RL
4291 return;
4292}
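/* Hedged sketch: building a single-entry TREE_MAP for
   tree_function_versioning.  The ipa_replace_map fields match the uses
   above; the varray initialization macros, XCNEW and the helper name are
   assumptions based on varray.h and libiberty, not code from this
   file.  */

static varray_type
make_single_replacement (tree old_tree, tree new_tree)
{
  struct ipa_replace_map *map = XCNEW (struct ipa_replace_map);
  varray_type tree_map;

  map->old_tree = old_tree;	/* DECL to replace in the clone.  */
  map->new_tree = new_tree;	/* Replacement expression.  */
  map->replace_p = true;

  VARRAY_GENERIC_PTR_INIT (tree_map, 1, "replacement map");
  VARRAY_PUSH_GENERIC_PTR (tree_map, map);
  return tree_map;
}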
4293
52dd234b
RH
4294/* Duplicate a type, fields and all. */
4295
4296tree
4297build_duplicate_type (tree type)
4298{
1b369fae 4299 struct copy_body_data id;
52dd234b
RH
4300
4301 memset (&id, 0, sizeof (id));
1b369fae
RH
4302 id.src_fn = current_function_decl;
4303 id.dst_fn = current_function_decl;
4304 id.src_cfun = cfun;
6be42dd4 4305 id.decl_map = pointer_map_create ();
4009f2e7 4306 id.copy_decl = copy_decl_no_change;
52dd234b
RH
4307
4308 type = remap_type_1 (type, &id);
4309
6be42dd4 4310 pointer_map_destroy (id.decl_map);
52dd234b 4311
f31c9f09
DG
4312 TYPE_CANONICAL (type) = type;
4313
52dd234b
RH
4314 return type;
4315}
ab442df7
MM
4316
4317 /* Return whether it is safe to inline CALLEE into CALLER, given that they
4318 may have been compiled with different target-specific or optimization options. */
4319bool
4320tree_can_inline_p (tree caller, tree callee)
4321{
4322 /* Refuse if the caller uses a higher optimization level than the callee,
4323 or different space constraints (hot/cold functions). */
4324 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
4325 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
4326
4327 if (caller_tree != callee_tree)
4328 {
4329 struct cl_optimization *caller_opt
4330 = TREE_OPTIMIZATION ((caller_tree)
4331 ? caller_tree
4332 : optimization_default_node);
4333
4334 struct cl_optimization *callee_opt
4335 = TREE_OPTIMIZATION ((callee_tree)
4336 ? callee_tree
4337 : optimization_default_node);
4338
4339 if ((caller_opt->optimize > callee_opt->optimize)
4340 || (caller_opt->optimize_size != callee_opt->optimize_size))
4341 return false;
4342 }
4343
4344 /* Allow the backend to decide if inlining is ok. */
4345 return targetm.target_option.can_inline_p (caller, callee);
4346}
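/* Hypothetical call-site check, mirroring how an inlining heuristic
   might consult tree_can_inline_p before committing to an edge.  The
   helper name is made up; the caller/callee fields of cgraph_edge are
   used just as in the rest of this file.  */

static bool
call_site_inlinable_p (struct cgraph_edge *e)
{
  return tree_can_inline_p (e->caller->decl, e->callee->decl);
}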