1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "varray.h"
36 #include "hashtab.h"
37 #include "splay-tree.h"
38 #include "langhooks.h"
39 #include "basic-block.h"
40 #include "tree-iterator.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-mudflap.h"
44 #include "tree-flow.h"
45 #include "function.h"
46 #include "ggc.h"
47 #include "tree-flow.h"
48 #include "diagnostic.h"
49 #include "except.h"
50 #include "debug.h"
51 #include "pointer-set.h"
52 #include "integrate.h"
53
54 /* I'm not really happy about this, but we need to handle gimple and
55 non-gimple trees. */
56 #include "tree-gimple.h"
57
58 /* Inlining, Saving, Cloning
59
60 Inlining: a function body is duplicated, but the PARM_DECLs are
61 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
62 MODIFY_EXPRs that store to a dedicated returned-value variable.
63 The duplicated eh_region info of the copy will later be appended
64 to the info for the caller; the eh_region info in copied throwing
65 statements and RESX_EXPRs is adjusted accordingly.
66
67 Saving: make a semantically-identical copy of the function body.
68 Necessary when we want to generate code for the body (a destructive
69 operation), but we expect to need this body in the future (e.g. for
70 inlining into another function).
71
72 Cloning: (only in C++) We have one body for a con/de/structor, and
73 multiple function decls, each with a unique parameter list.
74 Duplicate the body, using the given splay tree; some parameters
75 will become constants (like 0 or 1).
76
77 All of these will simultaneously lookup any callgraph edges. If
78 we're going to inline the duplicated function body, and the given
79 function has some cloned callgraph nodes (one for each place this
80 function will be inlined) those callgraph edges will be duplicated.
81 If we're saving or cloning the body, those callgraph edges will be
82 updated to point into the new body. (Note that the original
83 callgraph node and edge list will not be altered.)
84
85 See the CALL_EXPR handling case in copy_body_r (). */
86
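/* Editorial sketch (illustrative, not from the original sources): at the
   source level, inlining conceptually rewrites

       int sq (int x) { return x * x; }
       int f (void)   { return sq (3); }

   into something like

       int f (void)
       {
         int x.1 = 3;
         int retval.2;
         retval.2 = x.1 * x.1;
         return retval.2;
       }

   where x.1 is the VAR_DECL that the PARM_DECL x was remapped to, and
   retval.2 is the dedicated returned-value variable stored to by the
   MODIFY_EXPR that replaced the RETURN_EXPR.  Both names are
   hypothetical, internal-style temporaries.  */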
87 /* 0 if we should not perform inlining.
88 1 if we should expand function calls inline at the tree level.
89 2 if we should consider *all* functions to be inline
90 candidates. */
91
92 int flag_inline_trees = 0;
93
94 /* To Do:
95
96 o In order to make inlining-on-trees work, we pessimized
97 function-local static constants. In particular, they are now
98 always output, even when not addressed. Fix this by treating
99 function-local static constants just like global static
100 constants; the back-end already knows not to output them if they
101 are not needed.
102
103 o Provide heuristics to clamp inlining of recursive template
104 calls? */
105
106 /* Data required for function inlining. */
107
108 typedef struct inline_data
109 {
110 /* FUNCTION_DECL for function being inlined. */
111 tree callee;
112 /* FUNCTION_DECL for function being inlined into. */
113 tree caller;
114 /* struct function for function being inlined. Usually this is the same
115 as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
116 and saved_eh are in use. */
117 struct function *callee_cfun;
118 /* The VAR_DECL for the return value. */
119 tree retvar;
120 /* The map from local declarations in the inlined function to
121 equivalents in the function into which it is being inlined. */
122 splay_tree decl_map;
123 /* We use the same mechanism to build clones that we do to perform
124 inlining. However, there are a few places where we need to
125 distinguish between those two situations. This flag is true if
126 we are cloning, rather than inlining. */
127 bool cloning_p;
128 /* Similarly for saving function body. */
129 bool saving_p;
130 /* Callgraph node of function we are inlining into. */
131 struct cgraph_node *node;
132 /* Callgraph node of currently inlined function. */
133 struct cgraph_node *current_node;
134 /* Current BLOCK. */
135 tree block;
136 /* Exception region the inlined call lies in. */
137 int eh_region;
138 /* Take a region number in the function being copied, add this value, and
139 get the EH region number of the duplicate in the function we inline into. */
140 int eh_region_offset;
141 } inline_data;
142
143 /* Prototypes. */
144
145 static tree declare_return_variable (inline_data *, tree, tree, tree *);
146 static tree copy_body_r (tree *, int *, void *);
147 static tree copy_generic_body (inline_data *);
148 static bool inlinable_function_p (tree);
149 static tree remap_decl (tree, inline_data *);
150 static tree remap_type (tree, inline_data *);
151 static void remap_block (tree *, inline_data *);
152 static tree remap_decl (tree, inline_data *);
153 static tree remap_decls (tree, inline_data *);
154 static void copy_bind_expr (tree *, int *, inline_data *);
155 static tree mark_local_for_remap_r (tree *, int *, void *);
156 static void unsave_expr_1 (tree);
157 static tree unsave_r (tree *, int *, void *);
158 static void declare_inline_vars (tree, tree);
159 static void remap_save_expr (tree *, void *, int *);
160
161 static inline bool inlining_p (inline_data *id);
162 static void add_lexical_block (tree current_block, tree new_block);
163
164 /* Insert a tree->tree mapping for ID. Although the name suggests
165 that the trees should be variables, it is used for more than that. */
166
167 static void
168 insert_decl_map (inline_data *id, tree key, tree value)
169 {
170 splay_tree_insert (id->decl_map, (splay_tree_key) key,
171 (splay_tree_value) value);
172
173 /* Always insert an identity map as well. If we see this same new
174 node again, we won't want to duplicate it a second time. */
175 if (key != value)
176 splay_tree_insert (id->decl_map, (splay_tree_key) value,
177 (splay_tree_value) value);
178 }
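/* Editorial example: after insert_decl_map (id, old_var, new_var),
   looking up either old_var or new_var in id->decl_map yields new_var,
   so a later walk that encounters new_var itself maps it to itself
   instead of cloning it a second time.  */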
179
180 /* Remap DECL during the copying of the BLOCK tree for the function. */
181
182 static tree
183 remap_decl (tree decl, inline_data *id)
184 {
185 splay_tree_node n;
186 tree fn;
187
188 /* We only remap local variables in the current function. */
189 fn = id->callee;
190
191 /* See if we have remapped this declaration. */
192
193 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
194
195 /* If we didn't already have an equivalent for this declaration,
196 create one now. */
197 if (!n)
198 {
199 /* Make a copy of the variable or label. */
200 tree t;
201 t = copy_decl_for_inlining (decl, fn, id->caller);
202
203 /* Remember it, so that if we encounter this local entity again
204 we can reuse this copy. Do this early because remap_type may
205 need this decl for TYPE_STUB_DECL. */
206 insert_decl_map (id, decl, t);
207
208 /* Remap types, if necessary. */
209 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
210 if (TREE_CODE (t) == TYPE_DECL)
211 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
212
213 /* Remap sizes as necessary. */
214 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
215 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
216
217 /* If fields, do likewise for offset and qualifier. */
218 if (TREE_CODE (t) == FIELD_DECL)
219 {
220 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
221 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
222 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
223 }
224
225 #if 0
226 /* FIXME handle anon aggrs. */
227 if (! DECL_NAME (t) && TREE_TYPE (t)
228 && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
229 {
230 /* For a VAR_DECL of anonymous type, we must also copy the
231 member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS. */
232 tree members = NULL;
233 tree src;
234
235 for (src = DECL_ANON_UNION_ELEMS (t); src;
236 src = TREE_CHAIN (src))
237 {
238 tree member = remap_decl (TREE_VALUE (src), id);
239
240 gcc_assert (!TREE_PURPOSE (src));
241 members = tree_cons (NULL, member, members);
242 }
243 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
244 }
245 #endif
246
247 /* Remember it, so that if we encounter this local entity
248 again we can reuse this copy. */
249 insert_decl_map (id, decl, t);
250 return t;
251 }
252
253 return unshare_expr ((tree) n->value);
254 }
255
256 static tree
257 remap_type (tree type, inline_data *id)
258 {
259 splay_tree_node node;
260 tree new, t;
261
262 if (type == NULL)
263 return type;
264
265 /* See if we have remapped this type. */
266 node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
267 if (node)
268 return (tree) node->value;
269
270 /* The type only needs remapping if it's variably modified. */
271 if (! variably_modified_type_p (type, id->callee))
272 {
273 insert_decl_map (id, type, type);
274 return type;
275 }
276
277 /* We do need a copy; build and register it now. If this is a pointer or
278 reference type, remap the designated type and make a new pointer or
279 reference type. */
280 if (TREE_CODE (type) == POINTER_TYPE)
281 {
282 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
283 TYPE_MODE (type),
284 TYPE_REF_CAN_ALIAS_ALL (type));
285 insert_decl_map (id, type, new);
286 return new;
287 }
288 else if (TREE_CODE (type) == REFERENCE_TYPE)
289 {
290 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
291 TYPE_MODE (type),
292 TYPE_REF_CAN_ALIAS_ALL (type));
293 insert_decl_map (id, type, new);
294 return new;
295 }
296 else
297 new = copy_node (type);
298
299 insert_decl_map (id, type, new);
300
301 /* This is a new type, not a copy of an old type. Need to reassociate
302 variants. We can handle everything except the main variant lazily. */
303 t = TYPE_MAIN_VARIANT (type);
304 if (type != t)
305 {
306 t = remap_type (t, id);
307 TYPE_MAIN_VARIANT (new) = t;
308 TYPE_NEXT_VARIANT (new) = TYPE_NEXT_VARIANT (t);
309 TYPE_NEXT_VARIANT (t) = new;
310 }
311 else
312 {
313 TYPE_MAIN_VARIANT (new) = new;
314 TYPE_NEXT_VARIANT (new) = NULL;
315 }
316
317 if (TYPE_STUB_DECL (type))
318 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
319
320 /* Lazily create pointer and reference types. */
321 TYPE_POINTER_TO (new) = NULL;
322 TYPE_REFERENCE_TO (new) = NULL;
323
324 switch (TREE_CODE (new))
325 {
326 case INTEGER_TYPE:
327 case REAL_TYPE:
328 case ENUMERAL_TYPE:
329 case BOOLEAN_TYPE:
330 case CHAR_TYPE:
331 t = TYPE_MIN_VALUE (new);
332 if (t && TREE_CODE (t) != INTEGER_CST)
333 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
334
335 t = TYPE_MAX_VALUE (new);
336 if (t && TREE_CODE (t) != INTEGER_CST)
337 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
338 return new;
339
340 case FUNCTION_TYPE:
341 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
342 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
343 return new;
344
345 case ARRAY_TYPE:
346 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
347 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
348 break;
349
350 case RECORD_TYPE:
351 case UNION_TYPE:
352 case QUAL_UNION_TYPE:
353 walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
354 break;
355
356 case OFFSET_TYPE:
357 default:
358 /* Shouldn't have been thought variable sized. */
359 gcc_unreachable ();
360 }
361
362 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
363 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
364
365 return new;
366 }
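/* Editorial example (hypothetical code, not from the original sources):
   in

       void f (int n) { typedef int A[n]; A a; g (&a); }

   the array type A is variably modified, so remap_type must copy it
   when f is inlined: the walk_tree calls above rewrite the SAVE_EXPRs
   buried in TYPE_SIZE and in the domain's TYPE_MAX_VALUE so that they
   refer to the inlined copy of 'n' rather than to f's own PARM_DECL.  */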
367
368 static tree
369 remap_decls (tree decls, inline_data *id)
370 {
371 tree old_var;
372 tree new_decls = NULL_TREE;
373
374 /* Remap its variables. */
375 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
376 {
377 tree new_var;
378
379 /* We cannot remap local static declarations, as duplicating them
380 would break the one-declaration rule; instead, link the originals
381 directly into cfun's unexpanded_var_list. */
382 if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->callee)
383 && !DECL_EXTERNAL (old_var))
384 {
385 cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
386 cfun->unexpanded_var_list);
387 continue;
388 }
389
390 /* Remap the variable. */
391 new_var = remap_decl (old_var, id);
392
393 /* If we didn't remap this variable, we can't mess with its
394 TREE_CHAIN. If we remapped this variable to the return slot, it's
395 already declared somewhere else, so don't declare it here. */
396 if (!new_var || new_var == id->retvar)
397 ;
398 else
399 {
400 gcc_assert (DECL_P (new_var));
401 TREE_CHAIN (new_var) = new_decls;
402 new_decls = new_var;
403 }
404 }
405
406 return nreverse (new_decls);
407 }
408
409 /* Copy the BLOCK to contain remapped versions of the variables
410 therein. And hook the new block into the block-tree. */
411
412 static void
413 remap_block (tree *block, inline_data *id)
414 {
415 tree old_block;
416 tree new_block;
417 tree fn;
418
419 /* Make the new block. */
420 old_block = *block;
421 new_block = make_node (BLOCK);
422 TREE_USED (new_block) = TREE_USED (old_block);
423 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
424 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
425 *block = new_block;
426
427 /* Remap its variables. */
428 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
429
430 fn = id->caller;
431 if (id->cloning_p)
432 /* We're building a clone; DECL_INITIAL is still
433 error_mark_node, and current_binding_level is the parm
434 binding level. */
435 lang_hooks.decls.insert_block (new_block);
436 /* Remember the remapped block. */
437 insert_decl_map (id, old_block, new_block);
438 }
439
440 /* Copy the whole block tree and root it in id->block. */
441 static tree
442 remap_blocks (tree block, inline_data *id)
443 {
444 tree t;
445 tree new = block;
446
447 if (!block)
448 return NULL;
449
450 remap_block (&new, id);
451 gcc_assert (new != block);
452 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
453 add_lexical_block (new, remap_blocks (t, id));
454 return new;
455 }
456
457 static void
458 copy_statement_list (tree *tp)
459 {
460 tree_stmt_iterator oi, ni;
461 tree new;
462
463 new = alloc_stmt_list ();
464 ni = tsi_start (new);
465 oi = tsi_start (*tp);
466 *tp = new;
467
468 for (; !tsi_end_p (oi); tsi_next (&oi))
469 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
470 }
471
472 static void
473 copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
474 {
475 tree block = BIND_EXPR_BLOCK (*tp);
476 /* Copy (and replace) the statement. */
477 copy_tree_r (tp, walk_subtrees, NULL);
478 if (block)
479 {
480 remap_block (&block, id);
481 BIND_EXPR_BLOCK (*tp) = block;
482 }
483
484 if (BIND_EXPR_VARS (*tp))
485 /* This will remap a lot of the same decls again, but this should be
486 harmless. */
487 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
488 }
489
490 /* Callback used via walk_tree when copying a function body. DATA is really an
491 `inline_data *'. */
492
493 static tree
494 copy_body_r (tree *tp, int *walk_subtrees, void *data)
495 {
496 inline_data *id = (inline_data *) data;
497 tree fn = id->callee;
498 tree new_block;
499
500 /* Begin by recognizing trees that we'll completely rewrite for the
501 inlining context. Our output for these trees is completely
502 different from our input (e.g. RETURN_EXPR is deleted, and morphs
503 into an edge). Further down, we'll handle trees that get
504 duplicated and/or tweaked. */
505
506 /* If this is a RETURN_EXPR, replace it with (a copy of) the
507 MODIFY_EXPR it contains, or delete it if it returns no value. */
508 if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
509 {
510 tree assignment = TREE_OPERAND (*tp, 0);
511
512 /* If we're returning something, just turn that into an
513 assignment into the equivalent of the original RESULT_DECL.
514 If the "assignment" is just the result decl, the result
515 decl has already been set (e.g. a recent "foo (&result_decl,
516 ...)"); just toss the entire RETURN_EXPR. */
517 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
518 {
519 /* Replace the RETURN_EXPR with (a copy of) the
520 MODIFY_EXPR hanging underneath. */
521 *tp = copy_node (assignment);
522 }
523 else /* Else the RETURN_EXPR returns no value. */
524 {
525 *tp = NULL;
526 return (void *)1;
527 }
528 }
529
530 /* Local variables and labels need to be replaced by equivalent
531 variables. We don't want to copy static variables; there's only
532 one of those, no matter how many times we inline the containing
533 function. Similarly for globals from an outer function. */
534 else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
535 {
536 tree new_decl;
537
538 /* Remap the declaration. */
539 new_decl = remap_decl (*tp, id);
540 gcc_assert (new_decl);
541 /* Replace this variable with the copy. */
542 STRIP_TYPE_NOPS (new_decl);
543 *tp = new_decl;
544 *walk_subtrees = 0;
545 }
546 else if (TREE_CODE (*tp) == STATEMENT_LIST)
547 copy_statement_list (tp);
548 else if (TREE_CODE (*tp) == SAVE_EXPR)
549 remap_save_expr (tp, id->decl_map, walk_subtrees);
550 else if (TREE_CODE (*tp) == LABEL_DECL
551 && (! DECL_CONTEXT (*tp)
552 || decl_function_context (*tp) == id->callee))
553 /* These may need to be remapped for EH handling. */
554 *tp = remap_decl (*tp, id);
555 else if (TREE_CODE (*tp) == BIND_EXPR)
556 copy_bind_expr (tp, walk_subtrees, id);
557 /* Types may need remapping as well. */
558 else if (TYPE_P (*tp))
559 *tp = remap_type (*tp, id);
560
561 /* If this is a constant, we have to copy the node iff the type will be
562 remapped. copy_tree_r will not copy a constant. */
563 else if (CONSTANT_CLASS_P (*tp))
564 {
565 tree new_type = remap_type (TREE_TYPE (*tp), id);
566
567 if (new_type == TREE_TYPE (*tp))
568 *walk_subtrees = 0;
569
570 else if (TREE_CODE (*tp) == INTEGER_CST)
571 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
572 TREE_INT_CST_HIGH (*tp));
573 else
574 {
575 *tp = copy_node (*tp);
576 TREE_TYPE (*tp) = new_type;
577 }
578 }
579
580 /* Otherwise, just copy the node. Note that copy_tree_r already
581 knows not to copy VAR_DECLs, etc., so this is safe. */
582 else
583 {
584 /* Here we handle trees that are not completely rewritten.
585 First we detect some inlining-induced bogosities for
586 discarding. */
587 if (TREE_CODE (*tp) == MODIFY_EXPR
588 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
589 && (lang_hooks.tree_inlining.auto_var_in_fn_p
590 (TREE_OPERAND (*tp, 0), fn)))
591 {
592 /* Some assignments VAR = VAR; don't generate any rtl code
593 and thus don't count as variable modification. Avoid
594 keeping bogosities like 0 = 0. */
595 tree decl = TREE_OPERAND (*tp, 0), value;
596 splay_tree_node n;
597
598 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
599 if (n)
600 {
601 value = (tree) n->value;
602 STRIP_TYPE_NOPS (value);
603 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
604 {
605 *tp = build_empty_stmt ();
606 return copy_body_r (tp, walk_subtrees, data);
607 }
608 }
609 }
610 else if (TREE_CODE (*tp) == INDIRECT_REF)
611 {
612 /* Get rid of *& from inline substitutions that can happen when a
613 pointer argument is an ADDR_EXPR. */
614 tree decl = TREE_OPERAND (*tp, 0);
615 splay_tree_node n;
616
617 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
618 if (n)
619 {
620 /* If we happen to get an ADDR_EXPR in n->value, strip
621 it manually here as we'll eventually get ADDR_EXPRs
622 which lie about their types pointed to. In this case
623 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
624 but we absolutely rely on that. As fold_indirect_ref
625 does other useful transformations, try that first, though. */
626 tree type = TREE_TYPE (TREE_TYPE ((tree)n->value));
627 *tp = fold_indirect_ref_1 (type, (tree)n->value);
628 if (! *tp)
629 {
630 if (TREE_CODE ((tree)n->value) == ADDR_EXPR)
631 *tp = TREE_OPERAND ((tree)n->value, 0);
632 else
633 *tp = build1 (INDIRECT_REF, type, (tree)n->value);
634 }
635 *walk_subtrees = 0;
636 return NULL;
637 }
638 }
639
640 /* Here is the "usual case". Copy this tree node, and then
641 tweak some special cases. */
642 copy_tree_r (tp, walk_subtrees, NULL);
643
644 /* If EXPR has a block defined, map it to the newly constructed block.
645 When inlining we want EXPRs without a block to appear in the block
646 of the function call. */
647 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
648 {
649 new_block = id->block;
650 if (TREE_BLOCK (*tp))
651 {
652 splay_tree_node n;
653 n = splay_tree_lookup (id->decl_map,
654 (splay_tree_key) TREE_BLOCK (*tp));
655 gcc_assert (n);
656 new_block = (tree) n->value;
657 }
658 TREE_BLOCK (*tp) = new_block;
659 }
660
661 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
662 TREE_OPERAND (*tp, 0) =
663 build_int_cst
664 (NULL_TREE,
665 id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
666
667 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
668
669 /* The copied TARGET_EXPR has never been expanded, even if the
670 original node was expanded already. */
671 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
672 {
673 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
674 TREE_OPERAND (*tp, 3) = NULL_TREE;
675 }
676
677 /* Variable substitution need not be simple; consider the
678 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
679 and friends are up to date. */
680 else if (TREE_CODE (*tp) == ADDR_EXPR)
681 {
682 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
683 recompute_tree_invarant_for_addr_expr (*tp);
684 *walk_subtrees = 0;
685 }
686 }
687
688 /* Keep iterating. */
689 return NULL_TREE;
690 }
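/* Editorial example of the INDIRECT_REF case above (hypothetical code):
   when inlining

       void set (int *p) { *p = 1; }

   at the call 'set (&x)', the parameter p is mapped to the ADDR_EXPR
   '&x', so the dereference of p would literally become '*&x'; the code
   above folds it straight back to 'x'.  */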
691
692 /* Copy a basic block, scaling its profile accordingly. Edges will be
693 taken care of later. */
694
695 static basic_block
696 copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
697 {
698 block_stmt_iterator bsi, copy_bsi;
699 basic_block copy_basic_block;
700
701 /* create_basic_block() will append every new block to
702 basic_block_info automatically. */
703 copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
704 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
705 copy_basic_block->frequency = (bb->frequency
706 * frequency_scale / REG_BR_PROB_BASE);
707 copy_bsi = bsi_start (copy_basic_block);
708
709 for (bsi = bsi_start (bb);
710 !bsi_end_p (bsi); bsi_next (&bsi))
711 {
712 tree stmt = bsi_stmt (bsi);
713 tree orig_stmt = stmt;
714
715 walk_tree (&stmt, copy_body_r, id, NULL);
716
717 /* RETURN_EXPR might be removed; this is signalled by
718 setting the stmt pointer to NULL. */
719 if (stmt)
720 {
721 tree call, decl;
722 bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
723 call = get_call_expr_in (stmt);
724 /* We're duplicating a CALL_EXPR. Find any corresponding
725 callgraph edges and update or duplicate them. */
726 if (call && (decl = get_callee_fndecl (call)))
727 {
728 if (id->saving_p)
729 {
730 struct cgraph_node *node;
731 struct cgraph_edge *edge;
732
733 /* We're saving a copy of the body, so we'll update the
734 callgraph nodes in place. Note that we avoid
735 altering the original callgraph node; we begin with
736 the first clone. */
737 for (node = id->node->next_clone;
738 node;
739 node = node->next_clone)
740 {
741 edge = cgraph_edge (node, orig_stmt);
742 gcc_assert (edge);
743 edge->call_stmt = stmt;
744 }
745 }
746 else
747 {
748 struct cgraph_edge *edge;
749
750 /* We're cloning or inlining this body; duplicate the
751 associated callgraph edges. */
752 edge = cgraph_edge (id->current_node, orig_stmt);
753 if (edge)
754 cgraph_clone_edge (edge, id->node, stmt,
755 REG_BR_PROB_BASE, 1);
756 }
757 }
758 /* If you think we can abort here, you are wrong.
759 There is no region 0 in tree land. */
760 gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
761 != 0);
762
763 if (tree_could_throw_p (stmt))
764 {
765 int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
766 /* Add an entry for the copied tree in the EH hashtable.
767 When saving or cloning or versioning, use the hashtable in
768 cfun, and just copy the EH number. When inlining, use the
769 hashtable in the caller, and adjust the region number. */
770 if (region > 0)
771 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
772
773 /* If this tree doesn't have a region associated with it,
774 and there is a "current region,"
775 then associate this tree with the current region
776 and add edges associated with this region. */
777 if ((lookup_stmt_eh_region_fn (id->callee_cfun,
778 orig_stmt) <= 0
779 && id->eh_region > 0)
780 && tree_could_throw_p (stmt))
781 add_stmt_to_eh_region (stmt, id->eh_region);
782 }
783 }
784 }
785 return copy_basic_block;
786 }
787
788 /* Copy edges from BB into its copy constructed earlier, scaling the
789 profile accordingly. Assume the aux pointers point to the copies
790 of each BB. */
791 static void
792 copy_edges_for_bb (basic_block bb, int count_scale)
793 {
794 basic_block new_bb = bb->aux;
795 edge_iterator ei;
796 edge old_edge;
797 block_stmt_iterator bsi;
798 int flags;
799
800 /* Use the indices from the original blocks to create edges for the
801 new ones. */
802 FOR_EACH_EDGE (old_edge, ei, bb->succs)
803 if (!(old_edge->flags & EDGE_EH))
804 {
805 edge new;
806
807 flags = old_edge->flags;
808
809 /* Return edges do get a FALLTHRU flag when they get inlined. */
810 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
811 && old_edge->dest->aux != EXIT_BLOCK_PTR)
812 flags |= EDGE_FALLTHRU;
813 new = make_edge (new_bb, old_edge->dest->aux, flags);
814 new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
815 new->probability = old_edge->probability;
816 }
817
818 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
819 return;
820
821 for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
822 {
823 tree copy_stmt;
824
825 copy_stmt = bsi_stmt (bsi);
826 update_stmt (copy_stmt);
827 /* Do this before the possible split_block. */
828 bsi_next (&bsi);
829
830 /* If this tree could throw an exception, there are two
831 cases where we need to add abnormal edge(s): the
832 tree wasn't in a region and there is a "current
833 region" in the caller; or the original tree had
834 EH edges. In both cases split the block after the tree,
835 and add abnormal edge(s) as needed; we need both
836 those from the callee and the caller.
837 We check whether the copy can throw, because the const
838 propagation can change an INDIRECT_REF which throws
839 into a COMPONENT_REF which doesn't. If the copy
840 can throw, the original could also throw. */
841
842 if (tree_can_throw_internal (copy_stmt))
843 {
844 if (!bsi_end_p (bsi))
845 /* Note that bb's predecessor edges aren't necessarily
846 right at this point; split_block doesn't care. */
847 {
848 edge e = split_block (new_bb, copy_stmt);
849 new_bb = e->dest;
850 bsi = bsi_start (new_bb);
851 }
852
853 make_eh_edges (copy_stmt);
854 }
855 }
856 }
857
858 /* Wrapper for remap_decl so it can be used as a callback. */
859 static tree
860 remap_decl_1 (tree decl, void *data)
861 {
862 return remap_decl (decl, data);
863 }
864
865 /* Make a copy of the body of FN so that it can be inserted inline in
866 another function. Walks FN's CFG and returns the new fndecl. */
867
868 static tree
869 copy_cfg_body (inline_data * id, gcov_type count, int frequency,
870 basic_block entry_block_map, basic_block exit_block_map)
871 {
872 tree callee_fndecl = id->callee;
873 /* Original cfun for the callee, doesn't change. */
874 struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
875 /* Copy, built by this function. */
876 struct function *new_cfun;
877 /* Place to copy from; when a copy of the function was saved off earlier,
878 use that instead of the main copy. */
879 struct function *cfun_to_copy =
880 (struct function *) ggc_alloc_cleared (sizeof (struct function));
881 basic_block bb;
882 tree new_fndecl = NULL;
883 bool saving_or_cloning;
884 int count_scale, frequency_scale;
885
886 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
887 count_scale = (REG_BR_PROB_BASE * count
888 / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
889 else
890 count_scale = 1;
891
892 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
893 frequency_scale = (REG_BR_PROB_BASE * frequency
894 /
895 ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
896 else
897 frequency_scale = count_scale;
898
899 /* Register specific tree functions. */
900 tree_register_cfg_hooks ();
901
902 /* Must have a CFG at this point. */
903 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
904 (DECL_STRUCT_FUNCTION (callee_fndecl)));
905
906 *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);
907
908 /* If there is a saved_cfg+saved_args lurking in the
909 struct function, a copy of the callee body was saved there, and
910 the 'struct cgraph edge' nodes have been fudged to point into the
911 saved body. Accordingly, we want to copy that saved body so the
912 callgraph edges will be recognized and cloned properly. */
913 if (cfun_to_copy->saved_cfg)
914 {
915 cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
916 cfun_to_copy->eh = cfun_to_copy->saved_eh;
917 }
918 id->callee_cfun = cfun_to_copy;
919
920 /* If saving or cloning a function body, create new basic_block_info
921 and label_to_block_maps. Otherwise, we're duplicating a function
922 body for inlining; insert our new blocks and labels into the
923 existing varrays. */
924 saving_or_cloning = (id->saving_p || id->cloning_p);
925 if (saving_or_cloning)
926 {
927 new_cfun =
928 (struct function *) ggc_alloc_cleared (sizeof (struct function));
929 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
930 new_cfun->cfg = NULL;
931 new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
932 new_cfun->ib_boundaries_block = (varray_type) 0;
933 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
934 push_cfun (new_cfun);
935 init_empty_tree_cfg ();
936
937 ENTRY_BLOCK_PTR->count =
938 (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
939 REG_BR_PROB_BASE);
940 ENTRY_BLOCK_PTR->frequency =
941 (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
942 frequency_scale / REG_BR_PROB_BASE);
943 EXIT_BLOCK_PTR->count =
944 (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
945 REG_BR_PROB_BASE);
946 EXIT_BLOCK_PTR->frequency =
947 (EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
948 frequency_scale / REG_BR_PROB_BASE);
949
950 entry_block_map = ENTRY_BLOCK_PTR;
951 exit_block_map = EXIT_BLOCK_PTR;
952 }
953
954 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
955 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
956
957
958 /* Duplicate any exception-handling regions. */
959 if (cfun->eh)
960 {
961 if (saving_or_cloning)
962 init_eh_for_function ();
963 id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
964 remap_decl_1,
965 id, id->eh_region);
966 gcc_assert (inlining_p (id) || !id->eh_region_offset);
967 }
968 /* Use the aux pointers to map the original blocks to their copies. */
969 FOR_EACH_BB_FN (bb, cfun_to_copy)
970 bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
971 /* Now that we've duplicated the blocks, duplicate their edges. */
972 FOR_ALL_BB_FN (bb, cfun_to_copy)
973 copy_edges_for_bb (bb, count_scale);
974 FOR_ALL_BB_FN (bb, cfun_to_copy)
975 bb->aux = NULL;
976
977 if (saving_or_cloning)
978 pop_cfun ();
979
980 return new_fndecl;
981 }
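/* Editorial worked example of the scaling above (assumed numbers): if
   the callee's entry-block count is 1000 and this call site's COUNT is
   250, then count_scale == REG_BR_PROB_BASE * 250 / 1000, i.e. a
   quarter of REG_BR_PROB_BASE, and copy_bb computes each new block's
   count as bb->count * count_scale / REG_BR_PROB_BASE, i.e.
   bb->count / 4.  */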
982
983 /* Make a copy of the body of FN so that it can be inserted inline in
984 another function. */
985
986 static tree
987 copy_generic_body (inline_data *id)
988 {
989 tree body;
990 tree fndecl = id->callee;
991
992 body = DECL_SAVED_TREE (fndecl);
993 walk_tree (&body, copy_body_r, id, NULL);
994
995 return body;
996 }
997
998 static tree
999 copy_body (inline_data *id, gcov_type count, int frequency,
1000 basic_block entry_block_map, basic_block exit_block_map)
1001 {
1002 tree fndecl = id->callee;
1003 tree body;
1004
1005 /* If this body has a CFG, walk CFG and copy. */
1006 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1007 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1008
1009 return body;
1010 }
1011
1012 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
1013 defined in function FN, or of a data member thereof. */
1014
1015 static bool
1016 self_inlining_addr_expr (tree value, tree fn)
1017 {
1018 tree var;
1019
1020 if (TREE_CODE (value) != ADDR_EXPR)
1021 return false;
1022
1023 var = get_base_address (TREE_OPERAND (value, 0));
1024
1025 return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
1026 }
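/* Editorial note: when f is inlined into itself during recursive
   inlining, an argument such as '&local', where 'local' is an automatic
   variable of f, satisfies this predicate.  setup_one_parameter below
   uses it to refuse propagating such an address directly, since each
   inlined copy needs its own remapped variable.  */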
1027
1028 static void
1029 setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
1030 basic_block bb, tree *vars)
1031 {
1032 tree init_stmt;
1033 tree var;
1034 tree var_sub;
1035
1036 /* If the parameter is never assigned to, we may not need to
1037 create a new variable here at all. Instead, we may be able
1038 to just use the argument value. */
1039 if (TREE_READONLY (p)
1040 && !TREE_ADDRESSABLE (p)
1041 && value && !TREE_SIDE_EFFECTS (value))
1042 {
1043 /* We may produce non-gimple trees by adding NOPs or introduce
1044 invalid sharing when the operand is not really constant.
1045 It is no big deal to prohibit constant propagation here, as
1046 we will constant propagate in the DOM1 pass anyway. */
1047 if (is_gimple_min_invariant (value)
1048 && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
1049 /* We have to be very careful about ADDR_EXPR. Make sure
1050 the base variable isn't a local variable of the inlined
1051 function, e.g., when doing recursive inlining, direct or
1052 mutually-recursive or whatever, which is why we don't
1053 just test whether fn == current_function_decl. */
1054 && ! self_inlining_addr_expr (value, fn))
1055 {
1056 insert_decl_map (id, p, value);
1057 return;
1058 }
1059 }
1060
1061 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1062 here since the type of this decl must be visible to the calling
1063 function. */
1064 var = copy_decl_for_inlining (p, fn, id->caller);
1065
1066 /* See if the frontend wants to pass this by invisible reference. If
1067 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1068 replace uses of the PARM_DECL with dereferences. */
1069 if (TREE_TYPE (var) != TREE_TYPE (p)
1070 && POINTER_TYPE_P (TREE_TYPE (var))
1071 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1072 {
1073 insert_decl_map (id, var, var);
1074 var_sub = build_fold_indirect_ref (var);
1075 }
1076 else
1077 var_sub = var;
1078
1079 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1080 that way, when the PARM_DECL is encountered, it will be
1081 automatically replaced by the VAR_DECL. */
1082 insert_decl_map (id, p, var_sub);
1083
1084 /* Declare this new variable. */
1085 TREE_CHAIN (var) = *vars;
1086 *vars = var;
1087
1088 /* Make gimplifier happy about this variable. */
1089 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1090
1091 /* Even if P was TREE_READONLY, the new VAR should not be.
1092 In the original code, we would have constructed a
1093 temporary, and then the function body would have never
1094 changed the value of P. However, now, we will be
1095 constructing VAR directly. The constructor body may
1096 change its value multiple times as it is being
1097 constructed. Therefore, it must not be TREE_READONLY;
1098 the back-end assumes that a TREE_READONLY variable is
1099 assigned to only once. */
1100 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1101 TREE_READONLY (var) = 0;
1102
1103 /* Initialize this VAR_DECL from the equivalent argument. Convert
1104 the argument to the proper type in case it was promoted. */
1105 if (value)
1106 {
1107 tree rhs = fold_convert (TREE_TYPE (var), value);
1108 block_stmt_iterator bsi = bsi_last (bb);
1109
1110 if (rhs == error_mark_node)
1111 return;
1112
1113 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
1114 keep our trees in gimple form. */
1115 init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);
1116
1117 /* If we did not create a gimple value and we did not create a gimple
1118 cast of a gimple value, then we will need to gimplify INIT_STMTS
1119 at the end. Note that is_gimple_cast only checks the outer
1120 tree code, not its operand. Thus the explicit check that its
1121 operand is a gimple value. */
1122 if (!is_gimple_val (rhs)
1123 && (!is_gimple_cast (rhs)
1124 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1125 gimplify_stmt (&init_stmt);
1126 bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1127 }
1128 }
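/* Editorial sketch of the shortcut at the top of setup_one_parameter
   (hypothetical code): inlining

       int inc (const int p) { return p + 1; }

   at the call 'inc (3)' maps p directly to the invariant 3, so the body
   becomes '3 + 1' and no initialization statement is emitted.  A
   parameter that is addressable, assigned to, or bound to a
   non-invariant argument instead gets a fresh VAR_DECL initialized by
   the MODIFY_EXPR built above.  */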
1129
1130 /* Generate code to initialize the parameters of the function at the
1131 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
1132
1133 static void
1134 initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
1135 tree fn, basic_block bb)
1136 {
1137 tree parms;
1138 tree a;
1139 tree p;
1140 tree vars = NULL_TREE;
1141 int argnum = 0;
1142
1143 /* Figure out what the parameters are. */
1144 parms = DECL_ARGUMENTS (fn);
1145 if (fn == current_function_decl)
1146 parms = cfun->saved_args;
1147
1148 /* Loop through the parameter declarations, replacing each with an
1149 equivalent VAR_DECL, appropriately initialized. */
1150 for (p = parms, a = args; p;
1151 a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
1152 {
1153 tree value;
1154
1155 ++argnum;
1156
1157 /* Find the initializer. */
1158 value = lang_hooks.tree_inlining.convert_parm_for_inlining
1159 (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);
1160
1161 setup_one_parameter (id, p, value, fn, bb, &vars);
1162 }
1163
1164 /* Initialize the static chain. */
1165 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1166 if (fn == current_function_decl)
1167 p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
1168 if (p)
1169 {
1170 /* No static chain? Seems like a bug in tree-nested.c. */
1171 gcc_assert (static_chain);
1172
1173 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1174 }
1175
1176 declare_inline_vars (id->block, vars);
1177 }
1178
1179 /* Declare a return variable to replace the RESULT_DECL for the
1180 function we are calling.
1183
1184 RETURN_SLOT_ADDR, if non-null, was a fake parameter that
1185 took the address of the result. MODIFY_DEST, if non-null, was the LHS of
1186 the MODIFY_EXPR to which this call is the RHS.
1187
1188 The return value is a (possibly null) value that is the result of the
1189 function as seen by the callee. *USE_P is a (possibly null) value that
1190 holds the result as seen by the caller. */
1191
1192 static tree
1193 declare_return_variable (inline_data *id, tree return_slot_addr,
1194 tree modify_dest, tree *use_p)
1195 {
1196 tree callee = id->callee;
1197 tree caller = id->caller;
1198 tree result = DECL_RESULT (callee);
1199 tree callee_type = TREE_TYPE (result);
1200 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1201 tree var, use;
1202
1203 /* We don't need to do anything for functions that don't return
1204 anything. */
1205 if (!result || VOID_TYPE_P (callee_type))
1206 {
1207 *use_p = NULL_TREE;
1208 return NULL_TREE;
1209 }
1210
1211 /* If there was a return slot, then the return value is the
1212 dereferenced address of that object. */
1213 if (return_slot_addr)
1214 {
1215 /* The front end shouldn't have used both return_slot_addr and
1216 a modify expression. */
1217 gcc_assert (!modify_dest);
1218 if (DECL_BY_REFERENCE (result))
1219 var = return_slot_addr;
1220 else
1221 var = build_fold_indirect_ref (return_slot_addr);
1222 use = NULL;
1223 goto done;
1224 }
1225
1226 /* All types requiring non-trivial constructors should have been handled. */
1227 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1228
1229 /* Attempt to avoid creating a new temporary variable. */
1230 if (modify_dest)
1231 {
1232 bool use_it = false;
1233
1234 /* We can't use MODIFY_DEST if there's type promotion involved. */
1235 if (!lang_hooks.types_compatible_p (caller_type, callee_type))
1236 use_it = false;
1237
1238 /* ??? If we're assigning to a variable sized type, then we must
1239 reuse the destination variable, because we've no good way to
1240 create variable sized temporaries at this point. */
1241 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1242 use_it = true;
1243
1244 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1245 reuse it as the result of the call directly. Don't do this if
1246 it would promote MODIFY_DEST to addressable. */
1247 else if (!TREE_STATIC (modify_dest)
1248 && !TREE_ADDRESSABLE (modify_dest)
1249 && !TREE_ADDRESSABLE (result))
1250 use_it = true;
1251
1252 if (use_it)
1253 {
1254 var = modify_dest;
1255 use = NULL;
1256 goto done;
1257 }
1258 }
1259
1260 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1261
1262 var = copy_decl_for_inlining (result, callee, caller);
1263
1264 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1265 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1266 = tree_cons (NULL_TREE, var,
1267 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1268
1269 /* Do not have the rest of GCC warn about this variable as it should
1270 not be visible to the user. */
1271 TREE_NO_WARNING (var) = 1;
1272
1273 /* Build the use expr. If the return type of the function was
1274 promoted, convert it back to the expected type. */
1275 use = var;
1276 if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
1277 use = fold_convert (caller_type, var);
1278
1279 done:
1280 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1281 way, when the RESULT_DECL is encountered, it will be
1282 automatically replaced by the VAR_DECL. */
1283 insert_decl_map (id, result, var);
1284
1285 /* Remember this so we can ignore it in remap_decls. */
1286 id->retvar = var;
1287
1288 *use_p = use;
1289 return var;
1290 }
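/* Editorial example (hypothetical caller code): for 's = make_s ()',
   where make_s returns a struct by value, MODIFY_DEST is 's'; if 's' is
   neither static nor addressable, the callee's RESULT_DECL is mapped
   straight to 's' and no temporary is created.  When the front end
   instead passed a return slot, i.e. a fake parameter carrying '&s',
   the RESULT_DECL is mapped to the dereferenced slot address built by
   build_fold_indirect_ref above.  */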
1291
1292 /* Returns nonzero if a function can be inlined as a tree. */
1293
1294 bool
1295 tree_inlinable_function_p (tree fn)
1296 {
1297 return inlinable_function_p (fn);
1298 }
1299
1300 static const char *inline_forbidden_reason;
1301
1302 static tree
1303 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1304 void *fnp)
1305 {
1306 tree node = *nodep;
1307 tree fn = (tree) fnp;
1308 tree t;
1309
1310 switch (TREE_CODE (node))
1311 {
1312 case CALL_EXPR:
1313 /* Refuse to inline an alloca call unless the user explicitly forced it,
1314 as this may drastically change the program's memory overhead when a
1315 function using alloca is called in a loop. For the GCC sources in
1316 SPEC2000, inlining into schedule_block caused it to require 2GB of
1317 RAM instead of 256MB. */
1318 if (alloca_call_p (node)
1319 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1320 {
1321 inline_forbidden_reason
1322 = G_("function %q+F can never be inlined because it uses "
1323 "alloca (override using the always_inline attribute)");
1324 return node;
1325 }
1326 t = get_callee_fndecl (node);
1327 if (! t)
1328 break;
1329
1330 /* We cannot inline functions that call setjmp. */
1331 if (setjmp_call_p (t))
1332 {
1333 inline_forbidden_reason
1334 = G_("function %q+F can never be inlined because it uses setjmp");
1335 return node;
1336 }
1337
1338 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1339 switch (DECL_FUNCTION_CODE (t))
1340 {
1341 /* We cannot inline functions that take a variable number of
1342 arguments. */
1343 case BUILT_IN_VA_START:
1344 case BUILT_IN_STDARG_START:
1345 case BUILT_IN_NEXT_ARG:
1346 case BUILT_IN_VA_END:
1347 inline_forbidden_reason
1348 = G_("function %q+F can never be inlined because it "
1349 "uses variable argument lists");
1350 return node;
1351
1352 case BUILT_IN_LONGJMP:
1353 /* We can't inline functions that call __builtin_longjmp at
1354 all. The non-local goto machinery really requires the
1355 destination be in a different function. If we allow the
1356 function calling __builtin_longjmp to be inlined into the
1357 function calling __builtin_setjmp, Things will Go Awry. */
1358 inline_forbidden_reason
1359 = G_("function %q+F can never be inlined because "
1360 "it uses setjmp-longjmp exception handling");
1361 return node;
1362
1363 case BUILT_IN_NONLOCAL_GOTO:
1364 /* Similarly. */
1365 inline_forbidden_reason
1366 = G_("function %q+F can never be inlined because "
1367 "it uses non-local goto");
1368 return node;
1369
1370 case BUILT_IN_RETURN:
1371 case BUILT_IN_APPLY_ARGS:
1372 /* If a __builtin_apply_args caller would be inlined,
1373 it would be saving arguments of the function it has
1374 been inlined into. Similarly __builtin_return would
1375 return from the function it has been inlined into. */
1376 inline_forbidden_reason
1377 = G_("function %q+F can never be inlined because "
1378 "it uses __builtin_return or __builtin_apply_args");
1379 return node;
1380
1381 default:
1382 break;
1383 }
1384 break;
1385
1386 case GOTO_EXPR:
1387 t = TREE_OPERAND (node, 0);
1388
1389 /* We will not inline a function which uses computed goto. The
1390 addresses of its local labels, which may be tucked into
1391 global storage, are of course not constant across
1392 instantiations, which causes unexpected behavior. */
1393 if (TREE_CODE (t) != LABEL_DECL)
1394 {
1395 inline_forbidden_reason
1396 = G_("function %q+F can never be inlined "
1397 "because it contains a computed goto");
1398 return node;
1399 }
1400 break;
1401
1402 case LABEL_EXPR:
1403 t = TREE_OPERAND (node, 0);
1404 if (DECL_NONLOCAL (t))
1405 {
1406 /* We cannot inline a function that receives a non-local goto
1407 because we cannot remap the destination label used in the
1408 function that is performing the non-local goto. */
1409 inline_forbidden_reason
1410 = G_("function %q+F can never be inlined "
1411 "because it receives a non-local goto");
1412 return node;
1413 }
1414 break;
1415
1416 case RECORD_TYPE:
1417 case UNION_TYPE:
1418 /* We cannot inline a function of the form
1419
1420 void F (int i) { struct S { int ar[i]; } s; }
1421
1422 Attempting to do so produces a catch-22.
1423 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1424 UNION_TYPE nodes, then it goes into infinite recursion on a
1425 structure containing a pointer to its own type. If it doesn't,
1426 then the type node for S doesn't get adjusted properly when
1427 F is inlined.
1428
1429 ??? This is likely no longer true, but it's too late in the 4.0
1430 cycle to try to find out. This should be checked for 4.1. */
1431 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1432 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1433 {
1434 inline_forbidden_reason
1435 = G_("function %q+F can never be inlined "
1436 "because it uses variable sized variables");
1437 return node;
1438 }
1439
1440 default:
1441 break;
1442 }
1443
1444 return NULL_TREE;
1445 }
1446
1447 /* Return a subexpression that forbids inlining FNDECL (e.g. an alloca call), if any. */
1448 static tree
1449 inline_forbidden_p (tree fndecl)
1450 {
1451 location_t saved_loc = input_location;
1452 block_stmt_iterator bsi;
1453 basic_block bb;
1454 tree ret = NULL_TREE;
1455
1456 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1457 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1458 {
1459 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1460 inline_forbidden_p_1, fndecl);
1461 if (ret)
1462 goto egress;
1463 }
1464
1465 egress:
1466 input_location = saved_loc;
1467 return ret;
1468 }
1469
1470 /* Returns nonzero if FN is a function that does not have any
1471 fundamental inline blocking properties. */
1472
1473 static bool
1474 inlinable_function_p (tree fn)
1475 {
1476 bool inlinable = true;
1477
1478 /* If we've already decided this function shouldn't be inlined,
1479 there's no need to check again. */
1480 if (DECL_UNINLINABLE (fn))
1481 return false;
1482
1483 /* See if there is any language-specific reason it cannot be
1484 inlined. (It is important that this hook be called early because
1485 in C++ it may result in template instantiation.)
1486 If the function is not inlinable for language-specific reasons,
1487 it is left up to the langhook to explain why. */
1488 inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
1489
1490 /* If we don't have the function body available, we can't inline it.
1491 However, this should not be recorded since we also get here for
1492 forward declared inline functions. Therefore, return at once. */
1493 if (!DECL_SAVED_TREE (fn))
1494 return false;
1495
1496 /* If we're not inlining at all, then we cannot inline this function. */
1497 else if (!flag_inline_trees)
1498 inlinable = false;
1499
1500 /* Only try to inline functions if DECL_INLINE is set. This should be
1501 true for all functions declared `inline', and for all other functions
1502 as well with -finline-functions.
1503
1504 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
1505 it's the front-end that must set DECL_INLINE in this case, because
1506 dwarf2out loses if a function that does not have DECL_INLINE set is
1507 inlined anyway. That is why we have both DECL_INLINE and
1508 DECL_DECLARED_INLINE_P. */
1509 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
1510 here should be redundant. */
1511 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
1512 inlinable = false;
1513
1514 else if (inline_forbidden_p (fn))
1515 {
1516 /* See if we should warn about uninlinable functions. Previously,
1517 some of these warnings would be issued while trying to expand
1518 the function inline, but that would cause multiple warnings
1519 about functions that would, for example, call alloca. But since
1520 this is a property of the function, just one warning is enough.
1521 As a bonus we can now give more details about the reason why a
1522 function is not inlinable.
1523 We only warn for functions declared `inline' by the user. */
1524 bool do_warning = (warn_inline
1525 && DECL_INLINE (fn)
1526 && DECL_DECLARED_INLINE_P (fn)
1527 && !DECL_IN_SYSTEM_HEADER (fn));
1528
1529 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1530 sorry (inline_forbidden_reason, fn);
1531 else if (do_warning)
1532 warning (OPT_Winline, inline_forbidden_reason, fn);
1533
1534 inlinable = false;
1535 }
1536
1537 /* Squirrel away the result so that we don't have to check again. */
1538 DECL_UNINLINABLE (fn) = !inlinable;
1539
1540 return inlinable;
1541 }
1542
1543 /* Estimate the cost of a memory move. Use the machine-dependent
1544 word size and take a possible memcpy call into account. */
1545
1546 int
1547 estimate_move_cost (tree type)
1548 {
1549 HOST_WIDE_INT size;
1550
1551 size = int_size_in_bytes (type);
1552
1553 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
1554 /* Cost of a memcpy call, 3 arguments and the call. */
1555 return 4;
1556 else
1557 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
1558 }
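/* Editorial worked example (assumed target parameters MOVE_MAX_PIECES
   == 8 and MOVE_RATIO == 4): a 12-byte struct costs
   (12 + 8 - 1) / 8 == 2 units, while a 40-byte struct exceeds
   8 * 4 == 32 bytes and is charged the flat memcpy cost of 4.  */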
1559
1560 /* Used by estimate_num_insns. Estimate the number of instructions
1561 generated for the given statement. */
1562
1563 static tree
1564 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
1565 {
1566 int *count = data;
1567 tree x = *tp;
1568
1569 if (IS_TYPE_OR_DECL_P (x))
1570 {
1571 *walk_subtrees = 0;
1572 return NULL;
1573 }
1574 /* Assume that constants and references cost nothing. Their cost is
1575 dominated by the operations on them, which we count later, and
1576 they are a common target of CSE and similar optimizations. */
1577 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
1578 return NULL;
1579
1580 switch (TREE_CODE (x))
1581 {
1582 /* Containers have no cost. */
1583 case TREE_LIST:
1584 case TREE_VEC:
1585 case BLOCK:
1586 case COMPONENT_REF:
1587 case BIT_FIELD_REF:
1588 case INDIRECT_REF:
1589 case ALIGN_INDIRECT_REF:
1590 case MISALIGNED_INDIRECT_REF:
1591 case ARRAY_REF:
1592 case ARRAY_RANGE_REF:
1593 case OBJ_TYPE_REF:
1594 case EXC_PTR_EXPR: /* ??? */
1595 case FILTER_EXPR: /* ??? */
1596 case COMPOUND_EXPR:
1597 case BIND_EXPR:
1598 case WITH_CLEANUP_EXPR:
1599 case NOP_EXPR:
1600 case VIEW_CONVERT_EXPR:
1601 case SAVE_EXPR:
1602 case ADDR_EXPR:
1603 case COMPLEX_EXPR:
1604 case RANGE_EXPR:
1605 case CASE_LABEL_EXPR:
1606 case SSA_NAME:
1607 case CATCH_EXPR:
1608 case EH_FILTER_EXPR:
1609 case STATEMENT_LIST:
1610 case ERROR_MARK:
1611 case NON_LVALUE_EXPR:
1612 case FDESC_EXPR:
1613 case VA_ARG_EXPR:
1614 case TRY_CATCH_EXPR:
1615 case TRY_FINALLY_EXPR:
1616 case LABEL_EXPR:
1617 case GOTO_EXPR:
1618 case RETURN_EXPR:
1619 case EXIT_EXPR:
1620 case LOOP_EXPR:
1621 case PHI_NODE:
1622 case WITH_SIZE_EXPR:
1623 break;
1624
1625 /* We don't account constants for now. Assume that the cost is amortized
1626 by operations that do use them. We may reconsider this decision once
1627 we are able to optimize the tree before estimating its size and break
1628 out static initializers. */
1629 case IDENTIFIER_NODE:
1630 case INTEGER_CST:
1631 case REAL_CST:
1632 case COMPLEX_CST:
1633 case VECTOR_CST:
1634 case STRING_CST:
1635 *walk_subtrees = 0;
1636 return NULL;
1637
1638 /* Try to estimate the cost of assignments. We have three cases to
1639 deal with:
1640 1) Simple assignments to registers;
1641 2) Stores to things that must live in memory. This includes
1642 "normal" stores to scalars, but also assignments of large
1643 structures, or constructors of big arrays;
1644 3) TARGET_EXPRs.
1645
1646 Let us look at the first two cases, assuming we have "a = b + C":
1647 <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
1648 If "a" is a GIMPLE register, the assignment to it is free on almost
1649 any target, because "a" usually ends up in a real register. Hence
1650 the only cost of this expression comes from the PLUS_EXPR, and we
1651 can ignore the MODIFY_EXPR.
1652 If "a" is not a GIMPLE register, the assignment to "a" will most
1653 likely be a real store, so the cost of the MODIFY_EXPR is the cost
1654 of moving something into "a", which we compute using the function
1655 estimate_move_cost.
1656
1657 The third case deals with TARGET_EXPRs, for which the semantics are
1658 that a temporary is assigned, unless the TARGET_EXPR itself is being
1659 assigned to something else. In the latter case we do not need the
1660 temporary. E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
1661 MODIFY_EXPR is free. */
1662 case INIT_EXPR:
1663 case MODIFY_EXPR:
1664 /* Is the right-hand side a TARGET_EXPR? */
1665 if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
1666 break;
1667 /* ... fall through ... */
1668
1669 case TARGET_EXPR:
1670 x = TREE_OPERAND (x, 0);
1671 /* Is this an assignment to a register? */
1672 if (is_gimple_reg (x))
1673 break;
1674 /* Otherwise it's a store, so fall through to compute the move cost. */
1675
1676 case CONSTRUCTOR:
1677 *count += estimate_move_cost (TREE_TYPE (x));
1678 break;
1679
1680 /* Assign cost of 1 to usual operations.
1681 ??? We may consider mapping RTL costs to this. */
1682 case COND_EXPR:
1683 case VEC_COND_EXPR:
1684
1685 case PLUS_EXPR:
1686 case MINUS_EXPR:
1687 case MULT_EXPR:
1688
1689 case FIX_TRUNC_EXPR:
1690 case FIX_CEIL_EXPR:
1691 case FIX_FLOOR_EXPR:
1692 case FIX_ROUND_EXPR:
1693
1694 case NEGATE_EXPR:
1695 case FLOAT_EXPR:
1696 case MIN_EXPR:
1697 case MAX_EXPR:
1698 case ABS_EXPR:
1699
1700 case LSHIFT_EXPR:
1701 case RSHIFT_EXPR:
1702 case LROTATE_EXPR:
1703 case RROTATE_EXPR:
1704 case VEC_LSHIFT_EXPR:
1705 case VEC_RSHIFT_EXPR:
1706
1707 case BIT_IOR_EXPR:
1708 case BIT_XOR_EXPR:
1709 case BIT_AND_EXPR:
1710 case BIT_NOT_EXPR:
1711
1712 case TRUTH_ANDIF_EXPR:
1713 case TRUTH_ORIF_EXPR:
1714 case TRUTH_AND_EXPR:
1715 case TRUTH_OR_EXPR:
1716 case TRUTH_XOR_EXPR:
1717 case TRUTH_NOT_EXPR:
1718
1719 case LT_EXPR:
1720 case LE_EXPR:
1721 case GT_EXPR:
1722 case GE_EXPR:
1723 case EQ_EXPR:
1724 case NE_EXPR:
1725 case ORDERED_EXPR:
1726 case UNORDERED_EXPR:
1727
1728 case UNLT_EXPR:
1729 case UNLE_EXPR:
1730 case UNGT_EXPR:
1731 case UNGE_EXPR:
1732 case UNEQ_EXPR:
1733 case LTGT_EXPR:
1734
1735 case CONVERT_EXPR:
1736
1737 case CONJ_EXPR:
1738
1739 case PREDECREMENT_EXPR:
1740 case PREINCREMENT_EXPR:
1741 case POSTDECREMENT_EXPR:
1742 case POSTINCREMENT_EXPR:
1743
1744 case SWITCH_EXPR:
1745
1746 case ASM_EXPR:
1747
1748 case REALIGN_LOAD_EXPR:
1749
1750 case REDUC_MAX_EXPR:
1751 case REDUC_MIN_EXPR:
1752 case REDUC_PLUS_EXPR:
1753
1754 case RESX_EXPR:
1755 *count += 1;
1756 break;
1757
1758 /* A few special cases of expensive operations. This is useful
1759 for avoiding inlining of functions that contain too many of these. */
1760 case TRUNC_DIV_EXPR:
1761 case CEIL_DIV_EXPR:
1762 case FLOOR_DIV_EXPR:
1763 case ROUND_DIV_EXPR:
1764 case EXACT_DIV_EXPR:
1765 case TRUNC_MOD_EXPR:
1766 case CEIL_MOD_EXPR:
1767 case FLOOR_MOD_EXPR:
1768 case ROUND_MOD_EXPR:
1769 case RDIV_EXPR:
1770 *count += 10;
1771 break;
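
/* For example (a sketch), a body containing "x = a / b; y = a % b;"
   adds 10 for the division and 10 for the modulus, versus 1 for an
   addition, so division-heavy functions look correspondingly larger.  */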
1772 case CALL_EXPR:
1773 {
1774 tree decl = get_callee_fndecl (x);
1775 tree arg;
1776
1777 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
1778 switch (DECL_FUNCTION_CODE (decl))
1779 {
1780 case BUILT_IN_CONSTANT_P:
1781 *walk_subtrees = 0;
1782 return NULL_TREE;
1783 case BUILT_IN_EXPECT:
1784 return NULL_TREE;
1785 default:
1786 break;
1787 }
1788
1789 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
1790 which uses the function declaration to figure out the arguments. */
1791 if (!decl)
1792 {
1793 for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
1794 *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
1795 }
1796 else
1797 {
1798 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1799 *count += estimate_move_cost (TREE_TYPE (arg));
1800 }
1801
1802 *count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
1803 break;
1804 }
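
/* Sketch of the accounting above: for a call "f (x, y)" whose declaration
   is visible, the estimate grows by estimate_move_cost of each parameter
   type of F plus PARAM_VALUE (PARAM_INLINE_CALL_COST); for an indirect
   call, the types of the actual arguments are used instead.  */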
1805 default:
1806 gcc_unreachable ();
1807 }
1808 return NULL;
1809 }
1810
1811 /* Estimate number of instructions that will be created by expanding EXPR. */
1812
1813 int
1814 estimate_num_insns (tree expr)
1815 {
1816 int num = 0;
1817 struct pointer_set_t *visited_nodes;
1818 basic_block bb;
1819 block_stmt_iterator bsi;
1820 struct function *my_function;
1821
1822 /* If we're given an entire function, walk the CFG. */
1823 if (TREE_CODE (expr) == FUNCTION_DECL)
1824 {
1825 my_function = DECL_STRUCT_FUNCTION (expr);
1826 gcc_assert (my_function && my_function->cfg);
1827 visited_nodes = pointer_set_create ();
1828 FOR_EACH_BB_FN (bb, my_function)
1829 {
1830 for (bsi = bsi_start (bb);
1831 !bsi_end_p (bsi);
1832 bsi_next (&bsi))
1833 {
1834 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
1835 &num, visited_nodes);
1836 }
1837 }
1838 pointer_set_destroy (visited_nodes);
1839 }
1840 else
1841 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);
1842
1843 return num;
1844 }
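
/* A minimal usage sketch (FNDECL is a hypothetical FUNCTION_DECL whose
   CFG has been built; the size budget shown is illustrative, not the
   inliner's real policy):

     int size = estimate_num_insns (fndecl);
     if (size > some_budget)
       ...reject the inline candidate...
*/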
1845
1846 typedef struct function *function_p;
1847
1848 DEF_VEC_P(function_p);
1849 DEF_VEC_ALLOC_P(function_p,heap);
1850
1851 /* Initialized with NOGC, making this poisonous to the garbage collector. */
1852 static VEC(function_p,heap) *cfun_stack;
1853
1854 void
1855 push_cfun (struct function *new_cfun)
1856 {
1857 VEC_safe_push (function_p, heap, cfun_stack, cfun);
1858 cfun = new_cfun;
1859 }
1860
1861 void
1862 pop_cfun (void)
1863 {
1864 cfun = VEC_pop (function_p, cfun_stack);
1865 }
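
/* Usage sketch (illustrative): code that must temporarily operate on
   another function brackets the work with this pair, e.g.

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ...work that consults the global CFUN...
     pop_cfun ();

   OTHER_FNDECL here is a hypothetical FUNCTION_DECL.  */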
1866
1867 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
1868 static void
1869 add_lexical_block (tree current_block, tree new_block)
1870 {
1871 tree *blk_p;
1872
1873 /* Walk to the last sub-block. */
1874 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
1875 *blk_p;
1876 blk_p = &TREE_CHAIN (*blk_p))
1877 ;
1878 *blk_p = new_block;
1879 BLOCK_SUPERCONTEXT (new_block) = current_block;
1880 }
1881
1882 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1883
1884 static bool
1885 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
1886 {
1887 inline_data *id;
1888 tree t;
1889 tree use_retvar;
1890 tree fn;
1891 splay_tree st;
1892 tree args;
1893 tree return_slot_addr;
1894 tree modify_dest;
1895 location_t saved_location;
1896 struct cgraph_edge *cg_edge;
1897 const char *reason;
1898 basic_block return_block;
1899 edge e;
1900 block_stmt_iterator bsi, stmt_bsi;
1901 bool successfully_inlined = FALSE;
1902 tree t_step;
1903 tree var;
1904 struct cgraph_node *old_node;
1905 tree decl;
1906
1907 /* See what we've got. */
1908 id = (inline_data *) data;
1909 t = *tp;
1910
1911 /* Set input_location here so we get the right instantiation context
1912 if we call instantiate_decl from inlinable_function_p. */
1913 saved_location = input_location;
1914 if (EXPR_HAS_LOCATION (t))
1915 input_location = EXPR_LOCATION (t);
1916
1917 /* From here on, we're only interested in CALL_EXPRs. */
1918 if (TREE_CODE (t) != CALL_EXPR)
1919 goto egress;
1920
1921 /* First, see if we can figure out what function is being called.
1922 If we cannot, then there is no hope of inlining the function. */
1923 fn = get_callee_fndecl (t);
1924 if (!fn)
1925 goto egress;
1926
1927 /* Turn forward declarations into real ones. */
1928 fn = cgraph_node (fn)->decl;
1929
1930 /* If fn is a declaration of a function in a nested scope that was
1931 globally declared inline, we don't set its DECL_INITIAL.
1932 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1933 C++ front-end uses it for cdtors to refer to their internal
1934 declarations, that are not real functions. Fortunately those
1935 don't have trees to be saved, so we can tell by checking their
1936 DECL_SAVED_TREE. */
1937 if (! DECL_INITIAL (fn)
1938 && DECL_ABSTRACT_ORIGIN (fn)
1939 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1940 fn = DECL_ABSTRACT_ORIGIN (fn);
1941
1942 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
1943 Remove this check once that is fixed. */
1944 if (!id->current_node->analyzed)
1945 goto egress;
1946
1947 cg_edge = cgraph_edge (id->current_node, stmt);
1948
1949 /* Constant propagation on arguments performed during previous inlining
1950 may create a new direct call. Produce an edge for it. */
1951 if (!cg_edge)
1952 {
1953 struct cgraph_node *dest = cgraph_node (fn);
1954
1955 /* We have a missing edge in the callgraph. This can happen when
1956 previous inlining turned an indirect call into a direct call by
1957 constant propagating arguments. In all other cases we hit a bug
1958 (incorrect node sharing is the most common reason for missing edges). */
1959 gcc_assert (dest->needed || !flag_unit_at_a_time);
1960 cgraph_create_edge (id->node, dest, stmt,
1961 bb->count, bb->loop_depth)->inline_failed
1962 = N_("originally indirect function call not considered for inlining");
1963 goto egress;
1964 }
1965
1966 /* Don't try to inline functions that are not well-suited to
1967 inlining. */
1968 if (!cgraph_inline_p (cg_edge, &reason))
1969 {
1970 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
1971 /* Avoid warnings during early inline pass. */
1972 && (!flag_unit_at_a_time || cgraph_global_info_ready))
1973 {
1974 sorry ("inlining failed in call to %q+F: %s", fn, reason);
1975 sorry ("called from here");
1976 }
1977 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1978 && !DECL_IN_SYSTEM_HEADER (fn)
1979 && strlen (reason)
1980 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
1981 /* Avoid warnings during early inline pass. */
1982 && (!flag_unit_at_a_time || cgraph_global_info_ready))
1983 {
1984 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
1985 fn, reason);
1986 warning (OPT_Winline, "called from here");
1987 }
1988 goto egress;
1989 }
1990
1991 #ifdef ENABLE_CHECKING
1992 if (cg_edge->callee->decl != id->node->decl)
1993 verify_cgraph_node (cg_edge->callee);
1994 #endif
1995
1996 /* We will be inlining this callee. */
1997
1998 id->eh_region = lookup_stmt_eh_region (stmt);
1999
2000 /* Split the block holding the CALL_EXPR. */
2001
2002 e = split_block (bb, stmt);
2003 bb = e->src;
2004 return_block = e->dest;
2005 remove_edge (e);
2006
2007 /* split_block splits after the statement; work around this by moving
2008 the call into the second half_bb. Not pretty, but seems easier than
2009 doing the CFG manipulation by hand when the CALL_EXPR is the last
2010 statement in BB. */
2011 stmt_bsi = bsi_last (bb);
2012 bsi = bsi_start (return_block);
2013 if (!bsi_end_p (bsi))
2014 bsi_move_before (&stmt_bsi, &bsi);
2015 else
2016 {
2017 tree stmt = bsi_stmt (stmt_bsi);
2018 bsi_remove (&stmt_bsi);
2019 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2020 }
2021 stmt_bsi = bsi_start (return_block);
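
/* Sketch of the result at this point: BB now ends with the statements
   that preceded the call, RETURN_BLOCK begins with the CALL_EXPR
   statement itself, and the edge between the two halves is gone, so the
   callee's CFG can be stitched in between.  */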
2022
2023 /* Build a block containing code to initialize the arguments, the
2024 actual inline expansion of the body, and a label for the return
2025 statements within the function to jump to. The type of the
2026 statement expression is the return type of the function call. */
2027 id->block = make_node (BLOCK);
2028 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2029 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2030 add_lexical_block (TREE_BLOCK (stmt), id->block);
2031
2032 /* Local declarations will be replaced by their equivalents in this
2033 map. */
2034 st = id->decl_map;
2035 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2036 NULL, NULL);
2037
2038 /* Initialize the parameters. */
2039 args = TREE_OPERAND (t, 1);
2040
2041 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2042
2043 /* Record the function we are about to inline. */
2044 id->callee = fn;
2045
2046 if (DECL_STRUCT_FUNCTION (fn)->saved_blocks)
2047 add_lexical_block (id->block, remap_blocks (DECL_STRUCT_FUNCTION (fn)->saved_blocks, id));
2048 else if (DECL_INITIAL (fn))
2049 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2050
2051 /* Return statements in the function body will be replaced by jumps
2052 to the RET_LABEL. */
2053
2054 gcc_assert (DECL_INITIAL (fn));
2055 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2056
2057 /* Find the lhs to which the result of this call is assigned. */
2058 return_slot_addr = NULL;
2059 if (TREE_CODE (stmt) == MODIFY_EXPR)
2060 {
2061 modify_dest = TREE_OPERAND (stmt, 0);
2062
2063 /* The function which we are inlining might not return a value,
2064 in which case we should issue a warning that the function
2065 does not return a value. In that case the optimizers will
2066 see that the variable to which the value is assigned was not
2067 initialized. We do not want to issue a warning about that
2068 uninitialized variable. */
2069 if (DECL_P (modify_dest))
2070 TREE_NO_WARNING (modify_dest) = 1;
2071 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2072 {
2073 return_slot_addr = build_fold_addr_expr (modify_dest);
2074 modify_dest = NULL;
2075 }
2076 }
2077 else
2078 modify_dest = NULL;
2079
2080 /* Declare the return variable for the function. */
2081 decl = declare_return_variable (id, return_slot_addr,
2082 modify_dest, &use_retvar);
2083 /* Do this only if declare_return_variable created a new one. */
2084 if (decl && !return_slot_addr && decl != modify_dest)
2085 declare_inline_vars (id->block, decl);
2086
2087 /* After we've initialized the parameters, we insert the body of the
2088 function itself. */
2089 old_node = id->current_node;
2090
2091 /* Anoint the callee-to-be-duplicated as the "current_node." When
2092 CALL_EXPRs within callee are duplicated, the edges from callee to
2093 callee's callees (caller's grandchildren) will be cloned. */
2094 id->current_node = cg_edge->callee;
2095
2096 /* This is it. Duplicate the callee body. Assume callee is
2097 pre-gimplified. Note that we must not alter the caller
2098 function in any way before this point, as this CALL_EXPR may be
2099 a self-referential call; if we're calling ourselves, we need to
2100 duplicate our body before altering anything. */
2101 copy_body (id, bb->count, bb->frequency, bb, return_block);
2102 id->current_node = old_node;
2103
2104 /* Add local vars in this inlined callee to caller. */
2105 t_step = id->callee_cfun->unexpanded_var_list;
2106 if (id->callee_cfun->saved_unexpanded_var_list)
2107 t_step = id->callee_cfun->saved_unexpanded_var_list;
2108 for (; t_step; t_step = TREE_CHAIN (t_step))
2109 {
2110 var = TREE_VALUE (t_step);
2111 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2112 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2113 cfun->unexpanded_var_list);
2114 else
2115 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2116 cfun->unexpanded_var_list);
2117 }
2118
2119 /* Clean up. */
2120 splay_tree_delete (id->decl_map);
2121 id->decl_map = st;
2122
2123 /* If the inlined function returns a result that we care about,
2124 clobber the CALL_EXPR with a reference to the return variable. */
2125 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2126 {
2127 *tp = use_retvar;
2128 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2129 }
2130 else
2131 /* We're modifying a BSI owned by gimple_expand_calls_inline ();
2132 bsi_remove () will leave the iterator in a sane state. */
2133 bsi_remove (&stmt_bsi);
2134
2135 bsi_next (&bsi);
2136 if (bsi_end_p (bsi))
2137 tree_purge_dead_eh_edges (return_block);
2138
2139 /* If the value of the new expression is ignored, that's OK. We
2140 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2141 the equivalent inlined version either. */
2142 TREE_USED (*tp) = 1;
2143
2144 /* Output the inlining info for this abstract function, since it has been
2145 inlined. If we don't do this now, we can lose the information about the
2146 variables in the function when the blocks get blown away as soon as we
2147 remove the cgraph node. */
2148 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2149
2150 /* Update callgraph if needed. */
2151 cgraph_remove_node (cg_edge->callee);
2152
2153 /* Declare the 'auto' variables added with this inlined body. */
2154 record_vars (BLOCK_VARS (id->block));
2155 id->block = NULL_TREE;
2156 successfully_inlined = TRUE;
2157
2158 egress:
2159 input_location = saved_location;
2160 return successfully_inlined;
2161 }
2162
2163 /* Expand call statements reachable from STMT_P.
2164 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2165 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately
2166 we cannot use that function here because we need a pointer to the
2167 CALL_EXPR, not the tree itself. */
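
/* For illustration, the statement shapes peeled apart below are:

     CALL_EXPR
     MODIFY_EXPR <lhs, CALL_EXPR>
     MODIFY_EXPR <lhs, WITH_SIZE_EXPR <CALL_EXPR, size>>

   (a sketch of the expected GIMPLE forms, not an exhaustive grammar).  */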
2168
2169 static bool
2170 gimple_expand_calls_inline (basic_block bb, inline_data *id)
2171 {
2172 block_stmt_iterator bsi;
2173
2174 /* Register specific tree functions. */
2175 tree_register_cfg_hooks ();
2176 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2177 {
2178 tree *expr_p = bsi_stmt_ptr (bsi);
2179 tree stmt = *expr_p;
2180
2181 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
2182 expr_p = &TREE_OPERAND (*expr_p, 1);
2183 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2184 expr_p = &TREE_OPERAND (*expr_p, 0);
2185 if (TREE_CODE (*expr_p) == CALL_EXPR)
2186 if (expand_call_inline (bb, stmt, expr_p, id))
2187 return true;
2188 }
2189 return false;
2190 }
2191
2192 /* Expand calls to inline functions in the body of FN. */
2193
2194 void
2195 optimize_inline_calls (tree fn)
2196 {
2197 inline_data id;
2198 tree prev_fn;
2199 basic_block bb;
2200 /* There is no point in performing inlining if errors have already
2201 occurred -- and we might crash if we try to inline invalid
2202 code. */
2203 if (errorcount || sorrycount)
2204 return;
2205
2206 /* Clear out ID. */
2207 memset (&id, 0, sizeof (id));
2208
2209 id.current_node = id.node = cgraph_node (fn);
2210 id.caller = fn;
2211 /* Or any functions that aren't finished yet. */
2212 prev_fn = NULL_TREE;
2213 if (current_function_decl)
2214 {
2215 id.caller = current_function_decl;
2216 prev_fn = current_function_decl;
2217 }
2218 push_gimplify_context ();
2219
2220 /* Reach the trees by walking over the CFG, and note the
2221 enclosing basic-blocks in the call edges. */
2222 /* We walk the blocks going forward, because inlined function bodies
2223 will split id->current_basic_block, and the new blocks will
2224 follow it; we'll trudge through them, processing their CALL_EXPRs
2225 along the way. */
2226 FOR_EACH_BB (bb)
2227 gimple_expand_calls_inline (bb, &id);
2228 
2230 pop_gimplify_context (NULL);
2231 /* Renumber the (code) basic_blocks consecutively. */
2232 compact_blocks ();
2233 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2234 number_blocks (fn);
2235
2236 #ifdef ENABLE_CHECKING
2237 {
2238 struct cgraph_edge *e;
2239
2240 verify_cgraph_node (id.node);
2241
2242 /* Double check that we inlined everything we are supposed to inline. */
2243 for (e = id.node->callees; e; e = e->next_callee)
2244 gcc_assert (e->inline_failed);
2245 }
2246 #endif
2247 /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
2248 as inlining loops might increase the maximum. */
2249 if (ENTRY_BLOCK_PTR->count)
2250 counts_to_freqs ();
2251 fold_cond_expr_cond ();
2252 }
2253
2254 /* FN is a function that has a complete body, and CLONE is a function whose
2255 body is to be set to a copy of FN, mapping argument declarations according
2256 to the ARG_MAP splay_tree. */
2257
2258 void
2259 clone_body (tree clone, tree fn, void *arg_map)
2260 {
2261 inline_data id;
2262
2263 /* Clone the body, as if we were making an inline call. But, remap
2264 the parameters in the callee to the parameters of the caller. */
2265 memset (&id, 0, sizeof (id));
2266 id.caller = clone;
2267 id.callee = fn;
2268 id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2269 id.decl_map = (splay_tree)arg_map;
2270
2271 /* Cloning is treated slightly differently from inlining. Set
2272 CLONING_P so that it's clear which operation we're performing. */
2273 id.cloning_p = true;
2274
2275 /* We're not inside any EH region. */
2276 id.eh_region = -1;
2277
2278 /* Actually copy the body. */
2279 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2280 }
2281
2282 /* Save a duplicate of the body of FN for later use by the inliner.
2283 Put copies of DECL_ARGUMENTS in *ARG_COPY and of the static chain,
2284 if any, in *SC_COPY. */
2287
2288 void
2289 save_body (tree fn, tree *arg_copy, tree *sc_copy)
2290 {
2291 inline_data id;
2292 tree newdecl, *parg;
2293 basic_block fn_entry_block;
2294 tree t_step;
2295
2296 memset (&id, 0, sizeof (id));
2297 id.callee = fn;
2298 id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
2299 id.caller = fn;
2300 id.node = cgraph_node (fn);
2301 id.saving_p = true;
2302 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2303 *arg_copy = DECL_ARGUMENTS (fn);
2304
2305 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
2306 {
2307 tree new = copy_node (*parg);
2308
2309 lang_hooks.dup_lang_specific_decl (new);
2310 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
2311 insert_decl_map (&id, *parg, new);
2312 TREE_CHAIN (new) = TREE_CHAIN (*parg);
2313 *parg = new;
2314 }
2315
2316 *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2317 if (*sc_copy)
2318 {
2319 tree new = copy_node (*sc_copy);
2320
2321 lang_hooks.dup_lang_specific_decl (new);
2322 DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
2323 insert_decl_map (&id, *sc_copy, new);
2324 TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
2325 *sc_copy = new;
2326 }
2327
2328 /* We're not inside any EH region. */
2329 id.eh_region = -1;
2330
2331 insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));
2332
2333 DECL_STRUCT_FUNCTION (fn)->saved_blocks
2334 = remap_blocks (DECL_INITIAL (fn), &id);
2335 for (t_step = id.callee_cfun->unexpanded_var_list;
2336 t_step;
2337 t_step = TREE_CHAIN (t_step))
2338 {
2339 tree var = TREE_VALUE (t_step);
2340 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2341 cfun->saved_unexpanded_var_list
2342 = tree_cons (NULL_TREE, var, cfun->saved_unexpanded_var_list);
2343 else
2344 cfun->saved_unexpanded_var_list
2345 = tree_cons (NULL_TREE, remap_decl (var, &id),
2346 cfun->saved_unexpanded_var_list);
2347 }
2348
2349 /* Actually copy the body, including a new (struct function *) and CFG.
2350 EH info is also duplicated so its labels point into the copied
2351 CFG, not the original. */
2352 fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
2353 newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
2354 NULL, NULL);
2355 DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
2356 DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;
2357
2358 /* Clean up. */
2359 splay_tree_delete (id.decl_map);
2360 }
2361
2362 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2363
2364 tree
2365 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2366 {
2367 enum tree_code code = TREE_CODE (*tp);
2368
2369 /* We make copies of most nodes. */
2370 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
2371 || code == TREE_LIST
2372 || code == TREE_VEC
2373 || code == TYPE_DECL)
2374 {
2375 /* Because the chain gets clobbered when we make a copy, we save it
2376 here. */
2377 tree chain = TREE_CHAIN (*tp);
2378 tree new;
2379
2380 /* Copy the node. */
2381 new = copy_node (*tp);
2382
2383 /* Propagate mudflap marked-ness. */
2384 if (flag_mudflap && mf_marked_p (*tp))
2385 mf_mark (new);
2386
2387 *tp = new;
2388
2389 /* Now, restore the chain, if appropriate. That will cause
2390 walk_tree to walk into the chain as well. */
2391 if (code == PARM_DECL || code == TREE_LIST)
2392 TREE_CHAIN (*tp) = chain;
2393
2394 /* For now, we don't update BLOCKs when we make copies. So, we
2395 have to nullify all BIND_EXPRs. */
2396 if (TREE_CODE (*tp) == BIND_EXPR)
2397 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
2398 }
2399 else if (code == CONSTRUCTOR)
2400 {
2401 /* CONSTRUCTOR nodes need special handling because
2402 we need to duplicate the vector of elements. */
2403 tree new;
2404
2405 new = copy_node (*tp);
2406
2407 /* Propagate mudflap marked-ness. */
2408 if (flag_mudflap && mf_marked_p (*tp))
2409 mf_mark (new);
2410
2411 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
2412 CONSTRUCTOR_ELTS (*tp));
2413 *tp = new;
2414 }
2415 else if (TREE_CODE_CLASS (code) == tcc_type)
2416 *walk_subtrees = 0;
2417 else if (TREE_CODE_CLASS (code) == tcc_declaration)
2418 *walk_subtrees = 0;
2419 else if (TREE_CODE_CLASS (code) == tcc_constant)
2420 *walk_subtrees = 0;
2421 else
2422 gcc_assert (code != STATEMENT_LIST);
2423 return NULL_TREE;
2424 }
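
/* A minimal usage sketch for the callback above (EXPR is any tree to be
   duplicated node by node):

     walk_tree (&expr, copy_tree_r, NULL, NULL);

   afterwards EXPR points at fresh expression nodes, while declarations,
   types and constants remain deliberately shared.  */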
2425
2426 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2427 information indicating to what new SAVE_EXPR this one should be mapped,
2428 use that one. Otherwise, create a new node and enter it in ST. FN is
2429 the function into which the copy will be placed. */
2430
2431 static void
2432 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
2433 {
2434 splay_tree st = (splay_tree) st_;
2435 splay_tree_node n;
2436 tree t;
2437
2438 /* See if we already encountered this SAVE_EXPR. */
2439 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2440
2441 /* If we didn't already remap this SAVE_EXPR, do so now. */
2442 if (!n)
2443 {
2444 t = copy_node (*tp);
2445
2446 /* Remember this SAVE_EXPR. */
2447 splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
2448 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
2449 splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
2450 }
2451 else
2452 {
2453 /* We've already walked into this SAVE_EXPR; don't do it again. */
2454 *walk_subtrees = 0;
2455 t = (tree) n->value;
2456 }
2457
2458 /* Replace this SAVE_EXPR with the copy. */
2459 *tp = t;
2460 }
2461
2462 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
2463 copies the label's declaration and enters it in the splay_tree in DATA
2464 (which is really an `inline_data *'). */
2465
2466 static tree
2467 mark_local_for_remap_r (tree *tp, int *walk_subtrees, void *data)
2469 {
2470 inline_data *id = (inline_data *) data;
2471
2472 /* Don't walk into types. */
2473 if (TYPE_P (*tp))
2474 *walk_subtrees = 0;
2475
2476 else if (TREE_CODE (*tp) == LABEL_EXPR)
2477 {
2478 tree decl = TREE_OPERAND (*tp, 0);
2479
2480 /* Copy the decl and remember the copy. */
2481 insert_decl_map (id, decl,
2482 copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
2483 DECL_CONTEXT (decl)));
2484 }
2485
2486 return NULL_TREE;
2487 }
2488
2489 /* Perform any modifications to EXPR required when it is unsaved. Does
2490 not recurse into EXPR's subtrees. */
2491
2492 static void
2493 unsave_expr_1 (tree expr)
2494 {
2495 switch (TREE_CODE (expr))
2496 {
2497 case TARGET_EXPR:
2498 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
2499 It's OK for this to happen if it was part of a subtree that
2500 isn't immediately expanded, such as operand 2 of another
2501 TARGET_EXPR. */
2502 if (TREE_OPERAND (expr, 1))
2503 break;
2504
2505 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
2506 TREE_OPERAND (expr, 3) = NULL_TREE;
2507 break;
2508
2509 default:
2510 break;
2511 }
2512 }
2513
2514 /* Called via walk_tree when an expression is unsaved. Using the
2515 decl_map splay tree in the inline_data pointed to by DATA, remaps
2516 all local declarations to appropriate replacements. */
2517
2518 static tree
2519 unsave_r (tree *tp, int *walk_subtrees, void *data)
2520 {
2521 inline_data *id = (inline_data *) data;
2522 splay_tree st = id->decl_map;
2523 splay_tree_node n;
2524
2525 /* Only a local declaration (variable or label). */
2526 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
2527 || TREE_CODE (*tp) == LABEL_DECL)
2528 {
2529 /* Lookup the declaration. */
2530 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2531
2532 /* If it's there, remap it. */
2533 if (n)
2534 *tp = (tree) n->value;
2535 }
2536
2537 else if (TREE_CODE (*tp) == STATEMENT_LIST)
2538 copy_statement_list (tp);
2539 else if (TREE_CODE (*tp) == BIND_EXPR)
2540 copy_bind_expr (tp, walk_subtrees, id);
2541 else if (TREE_CODE (*tp) == SAVE_EXPR)
2542 remap_save_expr (tp, st, walk_subtrees);
2543 else
2544 {
2545 copy_tree_r (tp, walk_subtrees, NULL);
2546
2547 /* Do whatever unsaving is required. */
2548 unsave_expr_1 (*tp);
2549 }
2550
2551 /* Keep iterating. */
2552 return NULL_TREE;
2553 }
2554
2555 /* Copies everything in EXPR and replaces variables, labels
2556 and SAVE_EXPRs local to EXPR. */
2557
2558 tree
2559 unsave_expr_now (tree expr)
2560 {
2561 inline_data id;
2562
2563 /* There's nothing to do for NULL_TREE. */
2564 if (expr == 0)
2565 return expr;
2566
2567 /* Set up ID. */
2568 memset (&id, 0, sizeof (id));
2569 id.callee = current_function_decl;
2570 id.caller = current_function_decl;
2571 id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2572
2573 /* Walk the tree once to find local labels. */
2574 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
2575
2576 /* Walk the tree again, copying, remapping, and unsaving. */
2577 walk_tree (&expr, unsave_r, &id, NULL);
2578
2579 /* Clean up. */
2580 splay_tree_delete (id.decl_map);
2581
2582 return expr;
2583 }
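
/* Usage sketch (illustrative): "copy = unsave_expr_now (expr);" yields
   a copy of EXPR in which variables, labels and SAVE_EXPRs local to
   EXPR have been replaced by fresh equivalents; the original nodes of
   EXPR are left untouched.  */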
2584
2585 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
2586
2587 static tree
2588 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
2589 {
2590 if (*tp == data)
2591 return (tree) data;
2592 else
2593 return NULL;
2594 }
2595
2596 bool
2597 debug_find_tree (tree top, tree search)
2598 {
2599 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
2600 }
2601
2602
2603 /* Declare the variables created by the inliner. Add all the variables
2604 in VARS to BLOCK. */
2605
2606 static void
2607 declare_inline_vars (tree block, tree vars)
2608 {
2609 tree t;
2610 for (t = vars; t; t = TREE_CHAIN (t))
2611 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
2612
2613 if (block)
2614 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
2615 }
2616
2617 /* Returns true if we're inlining. */
2618 static inline bool
2619 inlining_p (inline_data *id)
2620 {
2621 return (!id->saving_p && !id->cloning_p);
2622 }