/* Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
25 #include "diagnostic-core.h"
27 #include "tree-inline.h"
31 #include "insn-config.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-iterator.h"
38 #include "gimple-iterator.h"
39 #include "gimple-walk.h"
40 #include "gimple-ssa.h"
42 #include "tree-phinodes.h"
43 #include "ssa-iterators.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
49 #include "tree-pretty-print.h"
52 #include "pointer-set.h"
54 #include "value-prof.h"
55 #include "tree-pass.h"
59 #include "rtl.h" /* FIXME: For asm_str_count. */
61 /* I'm not real happy about this, but we need to handle gimple and
64 /* Inlining, Cloning, Versioning, Parallelization
66 Inlining: a function body is duplicated, but the PARM_DECLs are
67 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
68 MODIFY_EXPRs that store to a dedicated returned-value variable.
69 The duplicated eh_region info of the copy will later be appended
70 to the info for the caller; the eh_region info in copied throwing
71 statements and RESX statements are adjusted accordingly.
73 Cloning: (only in C++) We have one body for a con/de/structor, and
74 multiple function decls, each with a unique parameter list.
75 Duplicate the body, using the given splay tree; some parameters
76 will become constants (like 0 or 1).
78 Versioning: a function body is duplicated and the result is a new
79 function rather than into blocks of an existing function as with
80 inlining. Some parameters will become constants.
82 Parallelization: a region of a function is duplicated resulting in
83 a new function. Variables may be replaced with complex expressions
84 to enable shared variable semantics.
86 All of these will simultaneously lookup any callgraph edges. If
87 we're going to inline the duplicated function body, and the given
88 function has some cloned callgraph nodes (one for each place this
89 function will be inlined) those callgraph edges will be duplicated.
90 If we're cloning the body, those callgraph edges will be
91 updated to point into the new body. (Note that the original
92 callgraph node and edge list will not be altered.)
94 See the CALL_EXPR handling case in copy_tree_body_r (). */
98 o In order to make inlining-on-trees work, we pessimized
99 function-local static constants. In particular, they are now
100 always output, even when not addressed. Fix this by treating
101 function-local static constants just like global static
102 constants; the back-end already knows not to output them if they
105 o Provide heuristics to clamp inlining of recursive template
109 /* Weights that estimate_num_insns uses to estimate the size of the
112 eni_weights eni_size_weights
;
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
117 eni_weights eni_time_weights
;
121 static tree
declare_return_variable (copy_body_data
*, tree
, tree
, basic_block
);
122 static void remap_block (tree
*, copy_body_data
*);
123 static void copy_bind_expr (tree
*, int *, copy_body_data
*);
124 static void declare_inline_vars (tree
, tree
);
125 static void remap_save_expr (tree
*, void *, int *);
126 static void prepend_lexical_block (tree current_block
, tree new_block
);
127 static tree
copy_decl_to_var (tree
, copy_body_data
*);
128 static tree
copy_result_decl_to_var (tree
, copy_body_data
*);
129 static tree
copy_decl_maybe_to_var (tree
, copy_body_data
*);
130 static gimple
remap_gimple_stmt (gimple
, copy_body_data
*);
131 static bool delete_unreachable_blocks_update_callgraph (copy_body_data
*id
);
133 /* Insert a tree->tree mapping for ID. Despite the name suggests
134 that the trees should be variables, it is used for more than that. */
137 insert_decl_map (copy_body_data
*id
, tree key
, tree value
)
139 *pointer_map_insert (id
->decl_map
, key
) = value
;
141 /* Always insert an identity map as well. If we see this same new
142 node again, we won't want to duplicate it a second time. */
144 *pointer_map_insert (id
->decl_map
, value
) = value
;
147 /* Insert a tree->tree mapping for ID. This is only used for
151 insert_debug_decl_map (copy_body_data
*id
, tree key
, tree value
)
153 if (!gimple_in_ssa_p (id
->src_cfun
))
156 if (!MAY_HAVE_DEBUG_STMTS
)
159 if (!target_for_debug_bind (key
))
162 gcc_assert (TREE_CODE (key
) == PARM_DECL
);
163 gcc_assert (TREE_CODE (value
) == VAR_DECL
);
166 id
->debug_map
= pointer_map_create ();
168 *pointer_map_insert (id
->debug_map
, key
) = value
;
171 /* If nonzero, we're remapping the contents of inlined debug
172 statements. If negative, an error has occurred, such as a
173 reference to a variable that isn't available in the inlined
175 static int processing_debug_stmt
= 0;
177 /* Construct new SSA name for old NAME. ID is the inline context. */
180 remap_ssa_name (tree name
, copy_body_data
*id
)
185 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
187 n
= (tree
*) pointer_map_contains (id
->decl_map
, name
);
189 return unshare_expr (*n
);
191 if (processing_debug_stmt
)
193 if (SSA_NAME_IS_DEFAULT_DEF (name
)
194 && TREE_CODE (SSA_NAME_VAR (name
)) == PARM_DECL
195 && id
->entry_bb
== NULL
196 && single_succ_p (ENTRY_BLOCK_PTR
))
198 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
200 gimple_stmt_iterator gsi
;
201 tree val
= SSA_NAME_VAR (name
);
203 n
= (tree
*) pointer_map_contains (id
->decl_map
, val
);
206 if (TREE_CODE (val
) != PARM_DECL
)
208 processing_debug_stmt
= -1;
211 def_temp
= gimple_build_debug_source_bind (vexpr
, val
, NULL
);
212 DECL_ARTIFICIAL (vexpr
) = 1;
213 TREE_TYPE (vexpr
) = TREE_TYPE (name
);
214 DECL_MODE (vexpr
) = DECL_MODE (SSA_NAME_VAR (name
));
215 gsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR
));
216 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
220 processing_debug_stmt
= -1;
224 /* Remap anonymous SSA names or SSA names of anonymous decls. */
225 var
= SSA_NAME_VAR (name
);
227 || (!SSA_NAME_IS_DEFAULT_DEF (name
)
228 && TREE_CODE (var
) == VAR_DECL
229 && !VAR_DECL_IS_VIRTUAL_OPERAND (var
)
230 && DECL_ARTIFICIAL (var
)
231 && DECL_IGNORED_P (var
)
232 && !DECL_NAME (var
)))
234 struct ptr_info_def
*pi
;
235 new_tree
= make_ssa_name (remap_type (TREE_TYPE (name
), id
), NULL
);
236 if (!var
&& SSA_NAME_IDENTIFIER (name
))
237 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree
, SSA_NAME_IDENTIFIER (name
));
238 insert_decl_map (id
, name
, new_tree
);
239 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
240 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
241 /* At least IPA points-to info can be directly transferred. */
242 if (id
->src_cfun
->gimple_df
243 && id
->src_cfun
->gimple_df
->ipa_pta
244 && (pi
= SSA_NAME_PTR_INFO (name
))
247 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
253 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
255 new_tree
= remap_decl (var
, id
);
257 /* We might've substituted constant or another SSA_NAME for
260 Replace the SSA name representing RESULT_DECL by variable during
261 inlining: this saves us from need to introduce PHI node in a case
262 return value is just partly initialized. */
263 if ((TREE_CODE (new_tree
) == VAR_DECL
|| TREE_CODE (new_tree
) == PARM_DECL
)
264 && (!SSA_NAME_VAR (name
)
265 || TREE_CODE (SSA_NAME_VAR (name
)) != RESULT_DECL
266 || !id
->transform_return_to_modify
))
268 struct ptr_info_def
*pi
;
269 new_tree
= make_ssa_name (new_tree
, NULL
);
270 insert_decl_map (id
, name
, new_tree
);
271 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
272 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
273 /* At least IPA points-to info can be directly transferred. */
274 if (id
->src_cfun
->gimple_df
275 && id
->src_cfun
->gimple_df
->ipa_pta
276 && (pi
= SSA_NAME_PTR_INFO (name
))
279 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
282 if (SSA_NAME_IS_DEFAULT_DEF (name
))
284 /* By inlining function having uninitialized variable, we might
285 extend the lifetime (variable might get reused). This cause
286 ICE in the case we end up extending lifetime of SSA name across
287 abnormal edge, but also increase register pressure.
289 We simply initialize all uninitialized vars by 0 except
290 for case we are inlining to very first BB. We can avoid
291 this for all BBs that are not inside strongly connected
292 regions of the CFG, but this is expensive to test. */
294 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
)
295 && (!SSA_NAME_VAR (name
)
296 || TREE_CODE (SSA_NAME_VAR (name
)) != PARM_DECL
)
297 && (id
->entry_bb
!= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0)->dest
298 || EDGE_COUNT (id
->entry_bb
->preds
) != 1))
300 gimple_stmt_iterator gsi
= gsi_last_bb (id
->entry_bb
);
302 tree zero
= build_zero_cst (TREE_TYPE (new_tree
));
304 init_stmt
= gimple_build_assign (new_tree
, zero
);
305 gsi_insert_after (&gsi
, init_stmt
, GSI_NEW_STMT
);
306 SSA_NAME_IS_DEFAULT_DEF (new_tree
) = 0;
310 SSA_NAME_DEF_STMT (new_tree
) = gimple_build_nop ();
311 set_ssa_default_def (cfun
, SSA_NAME_VAR (new_tree
), new_tree
);
316 insert_decl_map (id
, name
, new_tree
);
320 /* Remap DECL during the copying of the BLOCK tree for the function. */
323 remap_decl (tree decl
, copy_body_data
*id
)
327 /* We only remap local variables in the current function. */
329 /* See if we have remapped this declaration. */
331 n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
333 if (!n
&& processing_debug_stmt
)
335 processing_debug_stmt
= -1;
339 /* If we didn't already have an equivalent for this declaration,
343 /* Make a copy of the variable or label. */
344 tree t
= id
->copy_decl (decl
, id
);
346 /* Remember it, so that if we encounter this local entity again
347 we can reuse this copy. Do this early because remap_type may
348 need this decl for TYPE_STUB_DECL. */
349 insert_decl_map (id
, decl
, t
);
354 /* Remap types, if necessary. */
355 TREE_TYPE (t
) = remap_type (TREE_TYPE (t
), id
);
356 if (TREE_CODE (t
) == TYPE_DECL
)
357 DECL_ORIGINAL_TYPE (t
) = remap_type (DECL_ORIGINAL_TYPE (t
), id
);
359 /* Remap sizes as necessary. */
360 walk_tree (&DECL_SIZE (t
), copy_tree_body_r
, id
, NULL
);
361 walk_tree (&DECL_SIZE_UNIT (t
), copy_tree_body_r
, id
, NULL
);
363 /* If fields, do likewise for offset and qualifier. */
364 if (TREE_CODE (t
) == FIELD_DECL
)
366 walk_tree (&DECL_FIELD_OFFSET (t
), copy_tree_body_r
, id
, NULL
);
367 if (TREE_CODE (DECL_CONTEXT (t
)) == QUAL_UNION_TYPE
)
368 walk_tree (&DECL_QUALIFIER (t
), copy_tree_body_r
, id
, NULL
);
374 if (id
->do_not_unshare
)
377 return unshare_expr (*n
);
381 remap_type_1 (tree type
, copy_body_data
*id
)
385 /* We do need a copy. build and register it now. If this is a pointer or
386 reference type, remap the designated type and make a new pointer or
388 if (TREE_CODE (type
) == POINTER_TYPE
)
390 new_tree
= build_pointer_type_for_mode (remap_type (TREE_TYPE (type
), id
),
392 TYPE_REF_CAN_ALIAS_ALL (type
));
393 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
394 new_tree
= build_type_attribute_qual_variant (new_tree
,
395 TYPE_ATTRIBUTES (type
),
397 insert_decl_map (id
, type
, new_tree
);
400 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
402 new_tree
= build_reference_type_for_mode (remap_type (TREE_TYPE (type
), id
),
404 TYPE_REF_CAN_ALIAS_ALL (type
));
405 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
406 new_tree
= build_type_attribute_qual_variant (new_tree
,
407 TYPE_ATTRIBUTES (type
),
409 insert_decl_map (id
, type
, new_tree
);
413 new_tree
= copy_node (type
);
415 insert_decl_map (id
, type
, new_tree
);
417 /* This is a new type, not a copy of an old type. Need to reassociate
418 variants. We can handle everything except the main variant lazily. */
419 t
= TYPE_MAIN_VARIANT (type
);
422 t
= remap_type (t
, id
);
423 TYPE_MAIN_VARIANT (new_tree
) = t
;
424 TYPE_NEXT_VARIANT (new_tree
) = TYPE_NEXT_VARIANT (t
);
425 TYPE_NEXT_VARIANT (t
) = new_tree
;
429 TYPE_MAIN_VARIANT (new_tree
) = new_tree
;
430 TYPE_NEXT_VARIANT (new_tree
) = NULL
;
433 if (TYPE_STUB_DECL (type
))
434 TYPE_STUB_DECL (new_tree
) = remap_decl (TYPE_STUB_DECL (type
), id
);
436 /* Lazily create pointer and reference types. */
437 TYPE_POINTER_TO (new_tree
) = NULL
;
438 TYPE_REFERENCE_TO (new_tree
) = NULL
;
440 switch (TREE_CODE (new_tree
))
444 case FIXED_POINT_TYPE
:
447 t
= TYPE_MIN_VALUE (new_tree
);
448 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
449 walk_tree (&TYPE_MIN_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
451 t
= TYPE_MAX_VALUE (new_tree
);
452 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
453 walk_tree (&TYPE_MAX_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
457 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
458 walk_tree (&TYPE_ARG_TYPES (new_tree
), copy_tree_body_r
, id
, NULL
);
462 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
463 TYPE_DOMAIN (new_tree
) = remap_type (TYPE_DOMAIN (new_tree
), id
);
468 case QUAL_UNION_TYPE
:
472 for (f
= TYPE_FIELDS (new_tree
); f
; f
= DECL_CHAIN (f
))
474 t
= remap_decl (f
, id
);
475 DECL_CONTEXT (t
) = new_tree
;
479 TYPE_FIELDS (new_tree
) = nreverse (nf
);
485 /* Shouldn't have been thought variable sized. */
489 walk_tree (&TYPE_SIZE (new_tree
), copy_tree_body_r
, id
, NULL
);
490 walk_tree (&TYPE_SIZE_UNIT (new_tree
), copy_tree_body_r
, id
, NULL
);
496 remap_type (tree type
, copy_body_data
*id
)
504 /* See if we have remapped this type. */
505 node
= (tree
*) pointer_map_contains (id
->decl_map
, type
);
509 /* The type only needs remapping if it's variably modified. */
510 if (! variably_modified_type_p (type
, id
->src_fn
))
512 insert_decl_map (id
, type
, type
);
516 id
->remapping_type_depth
++;
517 tmp
= remap_type_1 (type
, id
);
518 id
->remapping_type_depth
--;
523 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
526 can_be_nonlocal (tree decl
, copy_body_data
*id
)
528 /* We can not duplicate function decls. */
529 if (TREE_CODE (decl
) == FUNCTION_DECL
)
532 /* Local static vars must be non-local or we get multiple declaration
534 if (TREE_CODE (decl
) == VAR_DECL
535 && !auto_var_in_fn_p (decl
, id
->src_fn
))
542 remap_decls (tree decls
, vec
<tree
, va_gc
> **nonlocalized_list
,
546 tree new_decls
= NULL_TREE
;
548 /* Remap its variables. */
549 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
553 if (can_be_nonlocal (old_var
, id
))
555 /* We need to add this variable to the local decls as otherwise
556 nothing else will do so. */
557 if (TREE_CODE (old_var
) == VAR_DECL
558 && ! DECL_EXTERNAL (old_var
))
559 add_local_decl (cfun
, old_var
);
560 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
561 && !DECL_IGNORED_P (old_var
)
562 && nonlocalized_list
)
563 vec_safe_push (*nonlocalized_list
, old_var
);
567 /* Remap the variable. */
568 new_var
= remap_decl (old_var
, id
);
570 /* If we didn't remap this variable, we can't mess with its
571 TREE_CHAIN. If we remapped this variable to the return slot, it's
572 already declared somewhere else, so don't declare it here. */
574 if (new_var
== id
->retvar
)
578 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
579 && !DECL_IGNORED_P (old_var
)
580 && nonlocalized_list
)
581 vec_safe_push (*nonlocalized_list
, old_var
);
585 gcc_assert (DECL_P (new_var
));
586 DECL_CHAIN (new_var
) = new_decls
;
589 /* Also copy value-expressions. */
590 if (TREE_CODE (new_var
) == VAR_DECL
591 && DECL_HAS_VALUE_EXPR_P (new_var
))
593 tree tem
= DECL_VALUE_EXPR (new_var
);
594 bool old_regimplify
= id
->regimplify
;
595 id
->remapping_type_depth
++;
596 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
597 id
->remapping_type_depth
--;
598 id
->regimplify
= old_regimplify
;
599 SET_DECL_VALUE_EXPR (new_var
, tem
);
604 return nreverse (new_decls
);
607 /* Copy the BLOCK to contain remapped versions of the variables
608 therein. And hook the new block into the block-tree. */
611 remap_block (tree
*block
, copy_body_data
*id
)
616 /* Make the new block. */
618 new_block
= make_node (BLOCK
);
619 TREE_USED (new_block
) = TREE_USED (old_block
);
620 BLOCK_ABSTRACT_ORIGIN (new_block
) = old_block
;
621 BLOCK_SOURCE_LOCATION (new_block
) = BLOCK_SOURCE_LOCATION (old_block
);
622 BLOCK_NONLOCALIZED_VARS (new_block
)
623 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block
));
626 /* Remap its variables. */
627 BLOCK_VARS (new_block
) = remap_decls (BLOCK_VARS (old_block
),
628 &BLOCK_NONLOCALIZED_VARS (new_block
),
631 if (id
->transform_lang_insert_block
)
632 id
->transform_lang_insert_block (new_block
);
634 /* Remember the remapped block. */
635 insert_decl_map (id
, old_block
, new_block
);
638 /* Copy the whole block tree and root it in id->block. */
640 remap_blocks (tree block
, copy_body_data
*id
)
643 tree new_tree
= block
;
648 remap_block (&new_tree
, id
);
649 gcc_assert (new_tree
!= block
);
650 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
651 prepend_lexical_block (new_tree
, remap_blocks (t
, id
));
652 /* Blocks are in arbitrary order, but make things slightly prettier and do
653 not swap order when producing a copy. */
654 BLOCK_SUBBLOCKS (new_tree
) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree
));
658 /* Remap the block tree rooted at BLOCK to nothing. */
660 remap_blocks_to_null (tree block
, copy_body_data
*id
)
663 insert_decl_map (id
, block
, NULL_TREE
);
664 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
665 remap_blocks_to_null (t
, id
);
669 copy_statement_list (tree
*tp
)
671 tree_stmt_iterator oi
, ni
;
674 new_tree
= alloc_stmt_list ();
675 ni
= tsi_start (new_tree
);
676 oi
= tsi_start (*tp
);
677 TREE_TYPE (new_tree
) = TREE_TYPE (*tp
);
680 for (; !tsi_end_p (oi
); tsi_next (&oi
))
682 tree stmt
= tsi_stmt (oi
);
683 if (TREE_CODE (stmt
) == STATEMENT_LIST
)
684 /* This copy is not redundant; tsi_link_after will smash this
685 STATEMENT_LIST into the end of the one we're building, and we
686 don't want to do that with the original. */
687 copy_statement_list (&stmt
);
688 tsi_link_after (&ni
, stmt
, TSI_CONTINUE_LINKING
);
693 copy_bind_expr (tree
*tp
, int *walk_subtrees
, copy_body_data
*id
)
695 tree block
= BIND_EXPR_BLOCK (*tp
);
696 /* Copy (and replace) the statement. */
697 copy_tree_r (tp
, walk_subtrees
, NULL
);
700 remap_block (&block
, id
);
701 BIND_EXPR_BLOCK (*tp
) = block
;
704 if (BIND_EXPR_VARS (*tp
))
705 /* This will remap a lot of the same decls again, but this should be
707 BIND_EXPR_VARS (*tp
) = remap_decls (BIND_EXPR_VARS (*tp
), NULL
, id
);
711 /* Create a new gimple_seq by remapping all the statements in BODY
712 using the inlining information in ID. */
715 remap_gimple_seq (gimple_seq body
, copy_body_data
*id
)
717 gimple_stmt_iterator si
;
718 gimple_seq new_body
= NULL
;
720 for (si
= gsi_start (body
); !gsi_end_p (si
); gsi_next (&si
))
722 gimple new_stmt
= remap_gimple_stmt (gsi_stmt (si
), id
);
723 gimple_seq_add_stmt (&new_body
, new_stmt
);
730 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
731 block using the mapping information in ID. */
734 copy_gimple_bind (gimple stmt
, copy_body_data
*id
)
737 tree new_block
, new_vars
;
738 gimple_seq body
, new_body
;
740 /* Copy the statement. Note that we purposely don't use copy_stmt
741 here because we need to remap statements as we copy. */
742 body
= gimple_bind_body (stmt
);
743 new_body
= remap_gimple_seq (body
, id
);
745 new_block
= gimple_bind_block (stmt
);
747 remap_block (&new_block
, id
);
749 /* This will remap a lot of the same decls again, but this should be
751 new_vars
= gimple_bind_vars (stmt
);
753 new_vars
= remap_decls (new_vars
, NULL
, id
);
755 new_bind
= gimple_build_bind (new_vars
, new_body
, new_block
);
760 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
765 if (TREE_CODE (decl
) == SSA_NAME
)
767 decl
= SSA_NAME_VAR (decl
);
772 return (TREE_CODE (decl
) == PARM_DECL
);
775 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
776 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
777 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
778 recursing into the children nodes of *TP. */
781 remap_gimple_op_r (tree
*tp
, int *walk_subtrees
, void *data
)
783 struct walk_stmt_info
*wi_p
= (struct walk_stmt_info
*) data
;
784 copy_body_data
*id
= (copy_body_data
*) wi_p
->info
;
785 tree fn
= id
->src_fn
;
787 if (TREE_CODE (*tp
) == SSA_NAME
)
789 *tp
= remap_ssa_name (*tp
, id
);
793 else if (auto_var_in_fn_p (*tp
, fn
))
795 /* Local variables and labels need to be replaced by equivalent
796 variables. We don't want to copy static variables; there's
797 only one of those, no matter how many times we inline the
798 containing function. Similarly for globals from an outer
802 /* Remap the declaration. */
803 new_decl
= remap_decl (*tp
, id
);
804 gcc_assert (new_decl
);
805 /* Replace this variable with the copy. */
806 STRIP_TYPE_NOPS (new_decl
);
807 /* ??? The C++ frontend uses void * pointer zero to initialize
808 any other type. This confuses the middle-end type verification.
809 As cloned bodies do not go through gimplification again the fixup
810 there doesn't trigger. */
811 if (TREE_CODE (new_decl
) == INTEGER_CST
812 && !useless_type_conversion_p (TREE_TYPE (*tp
), TREE_TYPE (new_decl
)))
813 new_decl
= fold_convert (TREE_TYPE (*tp
), new_decl
);
817 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
819 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
821 else if (TREE_CODE (*tp
) == LABEL_DECL
822 && (!DECL_CONTEXT (*tp
)
823 || decl_function_context (*tp
) == id
->src_fn
))
824 /* These may need to be remapped for EH handling. */
825 *tp
= remap_decl (*tp
, id
);
826 else if (TREE_CODE (*tp
) == FIELD_DECL
)
828 /* If the enclosing record type is variably_modified_type_p, the field
829 has already been remapped. Otherwise, it need not be. */
830 tree
*n
= (tree
*) pointer_map_contains (id
->decl_map
, *tp
);
835 else if (TYPE_P (*tp
))
836 /* Types may need remapping as well. */
837 *tp
= remap_type (*tp
, id
);
838 else if (CONSTANT_CLASS_P (*tp
))
840 /* If this is a constant, we have to copy the node iff the type
841 will be remapped. copy_tree_r will not copy a constant. */
842 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
844 if (new_type
== TREE_TYPE (*tp
))
847 else if (TREE_CODE (*tp
) == INTEGER_CST
)
848 *tp
= build_int_cst_wide (new_type
, TREE_INT_CST_LOW (*tp
),
849 TREE_INT_CST_HIGH (*tp
));
852 *tp
= copy_node (*tp
);
853 TREE_TYPE (*tp
) = new_type
;
858 /* Otherwise, just copy the node. Note that copy_tree_r already
859 knows not to copy VAR_DECLs, etc., so this is safe. */
861 if (TREE_CODE (*tp
) == MEM_REF
)
863 /* We need to re-canonicalize MEM_REFs from inline substitutions
864 that can happen when a pointer argument is an ADDR_EXPR.
865 Recurse here manually to allow that. */
866 tree ptr
= TREE_OPERAND (*tp
, 0);
867 tree type
= remap_type (TREE_TYPE (*tp
), id
);
869 walk_tree (&ptr
, remap_gimple_op_r
, data
, NULL
);
870 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
871 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
872 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
873 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
874 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
875 remapped a parameter as the property might be valid only
876 for the parameter itself. */
877 if (TREE_THIS_NOTRAP (old
)
878 && (!is_parm (TREE_OPERAND (old
, 0))
879 || (!id
->transform_parameter
&& is_parm (ptr
))))
880 TREE_THIS_NOTRAP (*tp
) = 1;
885 /* Here is the "usual case". Copy this tree node, and then
886 tweak some special cases. */
887 copy_tree_r (tp
, walk_subtrees
, NULL
);
889 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
890 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
892 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
894 /* The copied TARGET_EXPR has never been expanded, even if the
895 original node was expanded already. */
896 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
897 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
899 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
901 /* Variable substitution need not be simple. In particular,
902 the MEM_REF substitution above. Make sure that
903 TREE_CONSTANT and friends are up-to-date. */
904 int invariant
= is_gimple_min_invariant (*tp
);
905 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
906 recompute_tree_invariant_for_addr_expr (*tp
);
908 /* If this used to be invariant, but is not any longer,
909 then regimplification is probably needed. */
910 if (invariant
&& !is_gimple_min_invariant (*tp
))
911 id
->regimplify
= true;
917 /* Update the TREE_BLOCK for the cloned expr. */
920 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
921 tree old_block
= TREE_BLOCK (*tp
);
925 n
= (tree
*) pointer_map_contains (id
->decl_map
,
930 TREE_SET_BLOCK (*tp
, new_block
);
933 /* Keep iterating. */
938 /* Called from copy_body_id via walk_tree. DATA is really a
939 `copy_body_data *'. */
942 copy_tree_body_r (tree
*tp
, int *walk_subtrees
, void *data
)
944 copy_body_data
*id
= (copy_body_data
*) data
;
945 tree fn
= id
->src_fn
;
948 /* Begin by recognizing trees that we'll completely rewrite for the
949 inlining context. Our output for these trees is completely
950 different from out input (e.g. RETURN_EXPR is deleted, and morphs
951 into an edge). Further down, we'll handle trees that get
952 duplicated and/or tweaked. */
954 /* When requested, RETURN_EXPRs should be transformed to just the
955 contained MODIFY_EXPR. The branch semantics of the return will
956 be handled elsewhere by manipulating the CFG rather than a statement. */
957 if (TREE_CODE (*tp
) == RETURN_EXPR
&& id
->transform_return_to_modify
)
959 tree assignment
= TREE_OPERAND (*tp
, 0);
961 /* If we're returning something, just turn that into an
962 assignment into the equivalent of the original RESULT_DECL.
963 If the "assignment" is just the result decl, the result
964 decl has already been set (e.g. a recent "foo (&result_decl,
965 ...)"); just toss the entire RETURN_EXPR. */
966 if (assignment
&& TREE_CODE (assignment
) == MODIFY_EXPR
)
968 /* Replace the RETURN_EXPR with (a copy of) the
969 MODIFY_EXPR hanging underneath. */
970 *tp
= copy_node (assignment
);
972 else /* Else the RETURN_EXPR returns no value. */
975 return (tree
) (void *)1;
978 else if (TREE_CODE (*tp
) == SSA_NAME
)
980 *tp
= remap_ssa_name (*tp
, id
);
985 /* Local variables and labels need to be replaced by equivalent
986 variables. We don't want to copy static variables; there's only
987 one of those, no matter how many times we inline the containing
988 function. Similarly for globals from an outer function. */
989 else if (auto_var_in_fn_p (*tp
, fn
))
993 /* Remap the declaration. */
994 new_decl
= remap_decl (*tp
, id
);
995 gcc_assert (new_decl
);
996 /* Replace this variable with the copy. */
997 STRIP_TYPE_NOPS (new_decl
);
1001 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1002 copy_statement_list (tp
);
1003 else if (TREE_CODE (*tp
) == SAVE_EXPR
1004 || TREE_CODE (*tp
) == TARGET_EXPR
)
1005 remap_save_expr (tp
, id
->decl_map
, walk_subtrees
);
1006 else if (TREE_CODE (*tp
) == LABEL_DECL
1007 && (! DECL_CONTEXT (*tp
)
1008 || decl_function_context (*tp
) == id
->src_fn
))
1009 /* These may need to be remapped for EH handling. */
1010 *tp
= remap_decl (*tp
, id
);
1011 else if (TREE_CODE (*tp
) == BIND_EXPR
)
1012 copy_bind_expr (tp
, walk_subtrees
, id
);
1013 /* Types may need remapping as well. */
1014 else if (TYPE_P (*tp
))
1015 *tp
= remap_type (*tp
, id
);
1017 /* If this is a constant, we have to copy the node iff the type will be
1018 remapped. copy_tree_r will not copy a constant. */
1019 else if (CONSTANT_CLASS_P (*tp
))
1021 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1023 if (new_type
== TREE_TYPE (*tp
))
1026 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1027 *tp
= build_int_cst_wide (new_type
, TREE_INT_CST_LOW (*tp
),
1028 TREE_INT_CST_HIGH (*tp
));
1031 *tp
= copy_node (*tp
);
1032 TREE_TYPE (*tp
) = new_type
;
1036 /* Otherwise, just copy the node. Note that copy_tree_r already
1037 knows not to copy VAR_DECLs, etc., so this is safe. */
1040 /* Here we handle trees that are not completely rewritten.
1041 First we detect some inlining-induced bogosities for
1043 if (TREE_CODE (*tp
) == MODIFY_EXPR
1044 && TREE_OPERAND (*tp
, 0) == TREE_OPERAND (*tp
, 1)
1045 && (auto_var_in_fn_p (TREE_OPERAND (*tp
, 0), fn
)))
1047 /* Some assignments VAR = VAR; don't generate any rtl code
1048 and thus don't count as variable modification. Avoid
1049 keeping bogosities like 0 = 0. */
1050 tree decl
= TREE_OPERAND (*tp
, 0), value
;
1053 n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
1057 STRIP_TYPE_NOPS (value
);
1058 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1060 *tp
= build_empty_stmt (EXPR_LOCATION (*tp
));
1061 return copy_tree_body_r (tp
, walk_subtrees
, data
);
1065 else if (TREE_CODE (*tp
) == INDIRECT_REF
)
1067 /* Get rid of *& from inline substitutions that can happen when a
1068 pointer argument is an ADDR_EXPR. */
1069 tree decl
= TREE_OPERAND (*tp
, 0);
1070 tree
*n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
1073 /* If we happen to get an ADDR_EXPR in n->value, strip
1074 it manually here as we'll eventually get ADDR_EXPRs
1075 which lie about their types pointed to. In this case
1076 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1077 but we absolutely rely on that. As fold_indirect_ref
1078 does other useful transformations, try that first, though. */
1079 tree type
= TREE_TYPE (*tp
);
1080 tree ptr
= id
->do_not_unshare
? *n
: unshare_expr (*n
);
1082 *tp
= gimple_fold_indirect_ref (ptr
);
1085 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1088 = fold_indirect_ref_1 (EXPR_LOCATION (ptr
), type
, ptr
);
1089 /* ??? We should either assert here or build
1090 a VIEW_CONVERT_EXPR instead of blindly leaking
1091 incompatible types to our IL. */
1093 *tp
= TREE_OPERAND (ptr
, 0);
1097 *tp
= build1 (INDIRECT_REF
, type
, ptr
);
1098 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1099 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1100 TREE_READONLY (*tp
) = TREE_READONLY (old
);
1101 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1102 have remapped a parameter as the property might be
1103 valid only for the parameter itself. */
1104 if (TREE_THIS_NOTRAP (old
)
1105 && (!is_parm (TREE_OPERAND (old
, 0))
1106 || (!id
->transform_parameter
&& is_parm (ptr
))))
1107 TREE_THIS_NOTRAP (*tp
) = 1;
1114 else if (TREE_CODE (*tp
) == MEM_REF
)
1116 /* We need to re-canonicalize MEM_REFs from inline substitutions
1117 that can happen when a pointer argument is an ADDR_EXPR.
1118 Recurse here manually to allow that. */
1119 tree ptr
= TREE_OPERAND (*tp
, 0);
1120 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1122 walk_tree (&ptr
, copy_tree_body_r
, data
, NULL
);
1123 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1124 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1125 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1126 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1127 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1128 remapped a parameter as the property might be valid only
1129 for the parameter itself. */
1130 if (TREE_THIS_NOTRAP (old
)
1131 && (!is_parm (TREE_OPERAND (old
, 0))
1132 || (!id
->transform_parameter
&& is_parm (ptr
))))
1133 TREE_THIS_NOTRAP (*tp
) = 1;
1138 /* Here is the "usual case". Copy this tree node, and then
1139 tweak some special cases. */
1140 copy_tree_r (tp
, walk_subtrees
, NULL
);
1142 /* If EXPR has block defined, map it to newly constructed block.
1143 When inlining we want EXPRs without block appear in the block
1144 of function call if we are not remapping a type. */
1147 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1148 if (TREE_BLOCK (*tp
))
1151 n
= (tree
*) pointer_map_contains (id
->decl_map
,
1156 TREE_SET_BLOCK (*tp
, new_block
);
1159 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1160 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1162 /* The copied TARGET_EXPR has never been expanded, even if the
1163 original node was expanded already. */
1164 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1166 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1167 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1170 /* Variable substitution need not be simple. In particular, the
1171 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1172 and friends are up-to-date. */
1173 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1175 int invariant
= is_gimple_min_invariant (*tp
);
1176 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1178 /* Handle the case where we substituted an INDIRECT_REF
1179 into the operand of the ADDR_EXPR. */
1180 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
)
1181 *tp
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1183 recompute_tree_invariant_for_addr_expr (*tp
);
1185 /* If this used to be invariant, but is not any longer,
1186 then regimplification is probably needed. */
1187 if (invariant
&& !is_gimple_min_invariant (*tp
))
1188 id
->regimplify
= true;
1194 /* Keep iterating. */
1198 /* Helper for remap_gimple_stmt. Given an EH region number for the
1199 source function, map that to the duplicate EH region number in
1200 the destination function. */
1203 remap_eh_region_nr (int old_nr
, copy_body_data
*id
)
1205 eh_region old_r
, new_r
;
1208 old_r
= get_eh_region_from_number_fn (id
->src_cfun
, old_nr
);
1209 slot
= pointer_map_contains (id
->eh_map
, old_r
);
1210 new_r
= (eh_region
) *slot
;
1212 return new_r
->index
;
1215 /* Similar, but operate on INTEGER_CSTs. */
1218 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1222 old_nr
= tree_low_cst (old_t_nr
, 0);
1223 new_nr
= remap_eh_region_nr (old_nr
, id
);
1225 return build_int_cst (integer_type_node
, new_nr
);
1228 /* Helper for copy_bb. Remap statement STMT using the inlining
1229 information in ID. Return the new statement copy. */
1232 remap_gimple_stmt (gimple stmt
, copy_body_data
*id
)
1235 struct walk_stmt_info wi
;
1236 bool skip_first
= false;
1238 /* Begin by recognizing trees that we'll completely rewrite for the
1239 inlining context. Our output for these trees is completely
1240 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1241 into an edge). Further down, we'll handle trees that get
1242 duplicated and/or tweaked. */
1244 /* When requested, GIMPLE_RETURNs should be transformed to just the
1245 contained GIMPLE_ASSIGN. The branch semantics of the return will
1246 be handled elsewhere by manipulating the CFG rather than the
1248 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1250 tree retval
= gimple_return_retval (stmt
);
1252 /* If we're returning something, just turn that into an
1253 assignment into the equivalent of the original RESULT_DECL.
1254 If RETVAL is just the result decl, the result decl has
1255 already been set (e.g. a recent "foo (&result_decl, ...)");
1256 just toss the entire GIMPLE_RETURN. */
1258 && (TREE_CODE (retval
) != RESULT_DECL
1259 && (TREE_CODE (retval
) != SSA_NAME
1260 || ! SSA_NAME_VAR (retval
)
1261 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1263 copy
= gimple_build_assign (id
->retvar
, retval
);
1264 /* id->retvar is already substituted. Skip it on later remapping. */
1268 return gimple_build_nop ();
1270 else if (gimple_has_substatements (stmt
))
1274 /* When cloning bodies from the C++ front end, we will be handed bodies
1275 in High GIMPLE form. Handle here all the High GIMPLE statements that
1276 have embedded statements. */
1277 switch (gimple_code (stmt
))
1280 copy
= copy_gimple_bind (stmt
, id
);
1284 s1
= remap_gimple_seq (gimple_catch_handler (stmt
), id
);
1285 copy
= gimple_build_catch (gimple_catch_types (stmt
), s1
);
1288 case GIMPLE_EH_FILTER
:
1289 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1290 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1294 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1295 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1296 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1299 case GIMPLE_WITH_CLEANUP_EXPR
:
1300 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1301 copy
= gimple_build_wce (s1
);
1304 case GIMPLE_OMP_PARALLEL
:
1305 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1306 copy
= gimple_build_omp_parallel
1308 gimple_omp_parallel_clauses (stmt
),
1309 gimple_omp_parallel_child_fn (stmt
),
1310 gimple_omp_parallel_data_arg (stmt
));
1313 case GIMPLE_OMP_TASK
:
1314 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1315 copy
= gimple_build_omp_task
1317 gimple_omp_task_clauses (stmt
),
1318 gimple_omp_task_child_fn (stmt
),
1319 gimple_omp_task_data_arg (stmt
),
1320 gimple_omp_task_copy_fn (stmt
),
1321 gimple_omp_task_arg_size (stmt
),
1322 gimple_omp_task_arg_align (stmt
));
1325 case GIMPLE_OMP_FOR
:
1326 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1327 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1328 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1329 gimple_omp_for_clauses (stmt
),
1330 gimple_omp_for_collapse (stmt
), s2
);
1333 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1335 gimple_omp_for_set_index (copy
, i
,
1336 gimple_omp_for_index (stmt
, i
));
1337 gimple_omp_for_set_initial (copy
, i
,
1338 gimple_omp_for_initial (stmt
, i
));
1339 gimple_omp_for_set_final (copy
, i
,
1340 gimple_omp_for_final (stmt
, i
));
1341 gimple_omp_for_set_incr (copy
, i
,
1342 gimple_omp_for_incr (stmt
, i
));
1343 gimple_omp_for_set_cond (copy
, i
,
1344 gimple_omp_for_cond (stmt
, i
));
1349 case GIMPLE_OMP_MASTER
:
1350 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1351 copy
= gimple_build_omp_master (s1
);
1354 case GIMPLE_OMP_TASKGROUP
:
1355 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1356 copy
= gimple_build_omp_taskgroup (s1
);
1359 case GIMPLE_OMP_ORDERED
:
1360 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1361 copy
= gimple_build_omp_ordered (s1
);
1364 case GIMPLE_OMP_SECTION
:
1365 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1366 copy
= gimple_build_omp_section (s1
);
1369 case GIMPLE_OMP_SECTIONS
:
1370 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1371 copy
= gimple_build_omp_sections
1372 (s1
, gimple_omp_sections_clauses (stmt
));
1375 case GIMPLE_OMP_SINGLE
:
1376 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1377 copy
= gimple_build_omp_single
1378 (s1
, gimple_omp_single_clauses (stmt
));
1381 case GIMPLE_OMP_TARGET
:
1382 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1383 copy
= gimple_build_omp_target
1384 (s1
, gimple_omp_target_kind (stmt
),
1385 gimple_omp_target_clauses (stmt
));
1388 case GIMPLE_OMP_TEAMS
:
1389 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1390 copy
= gimple_build_omp_teams
1391 (s1
, gimple_omp_teams_clauses (stmt
));
1394 case GIMPLE_OMP_CRITICAL
:
1395 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1397 = gimple_build_omp_critical (s1
, gimple_omp_critical_name (stmt
));
1400 case GIMPLE_TRANSACTION
:
1401 s1
= remap_gimple_seq (gimple_transaction_body (stmt
), id
);
1402 copy
= gimple_build_transaction (s1
, gimple_transaction_label (stmt
));
1403 gimple_transaction_set_subcode (copy
, gimple_transaction_subcode (stmt
));
1412 if (gimple_assign_copy_p (stmt
)
1413 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1414 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1416 /* Here we handle statements that are not completely rewritten.
1417 First we detect some inlining-induced bogosities for
1420 /* Some assignments VAR = VAR; don't generate any rtl code
1421 and thus don't count as variable modification. Avoid
1422 keeping bogosities like 0 = 0. */
1423 tree decl
= gimple_assign_lhs (stmt
), value
;
1426 n
= (tree
*) pointer_map_contains (id
->decl_map
, decl
);
1430 STRIP_TYPE_NOPS (value
);
1431 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1432 return gimple_build_nop ();
1436 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1437 in a block that we aren't copying during tree_function_versioning,
1438 just drop the clobber stmt. */
1439 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1441 tree lhs
= gimple_assign_lhs (stmt
);
1442 if (TREE_CODE (lhs
) == MEM_REF
1443 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1445 gimple def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1446 if (gimple_bb (def_stmt
)
1447 && !bitmap_bit_p (id
->blocks_to_copy
,
1448 gimple_bb (def_stmt
)->index
))
1449 return gimple_build_nop ();
1453 if (gimple_debug_bind_p (stmt
))
1455 copy
= gimple_build_debug_bind (gimple_debug_bind_get_var (stmt
),
1456 gimple_debug_bind_get_value (stmt
),
1458 id
->debug_stmts
.safe_push (copy
);
1461 if (gimple_debug_source_bind_p (stmt
))
1463 copy
= gimple_build_debug_source_bind
1464 (gimple_debug_source_bind_get_var (stmt
),
1465 gimple_debug_source_bind_get_value (stmt
), stmt
);
1466 id
->debug_stmts
.safe_push (copy
);
1470 /* Create a new deep copy of the statement. */
1471 copy
= gimple_copy (stmt
);
1473 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1474 RESX and EH_DISPATCH. */
1476 switch (gimple_code (copy
))
1480 tree r
, fndecl
= gimple_call_fndecl (copy
);
1481 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
1482 switch (DECL_FUNCTION_CODE (fndecl
))
1484 case BUILT_IN_EH_COPY_VALUES
:
1485 r
= gimple_call_arg (copy
, 1);
1486 r
= remap_eh_region_tree_nr (r
, id
);
1487 gimple_call_set_arg (copy
, 1, r
);
1490 case BUILT_IN_EH_POINTER
:
1491 case BUILT_IN_EH_FILTER
:
1492 r
= gimple_call_arg (copy
, 0);
1493 r
= remap_eh_region_tree_nr (r
, id
);
1494 gimple_call_set_arg (copy
, 0, r
);
1501 /* Reset alias info if we didn't apply measures to
1502 keep it valid over inlining by setting DECL_PT_UID. */
1503 if (!id
->src_cfun
->gimple_df
1504 || !id
->src_cfun
->gimple_df
->ipa_pta
)
1505 gimple_call_reset_alias_info (copy
);
1511 int r
= gimple_resx_region (copy
);
1512 r
= remap_eh_region_nr (r
, id
);
1513 gimple_resx_set_region (copy
, r
);
1517 case GIMPLE_EH_DISPATCH
:
1519 int r
= gimple_eh_dispatch_region (copy
);
1520 r
= remap_eh_region_nr (r
, id
);
1521 gimple_eh_dispatch_set_region (copy
, r
);
1530 /* If STMT has a block defined, map it to the newly constructed
1532 if (gimple_block (copy
))
1535 n
= (tree
*) pointer_map_contains (id
->decl_map
, gimple_block (copy
));
1537 gimple_set_block (copy
, *n
);
1540 if (gimple_debug_bind_p (copy
) || gimple_debug_source_bind_p (copy
))
1543 /* Remap all the operands in COPY. */
1544 memset (&wi
, 0, sizeof (wi
));
1547 walk_tree (gimple_op_ptr (copy
, 1), remap_gimple_op_r
, &wi
, NULL
);
1549 walk_gimple_op (copy
, remap_gimple_op_r
, &wi
);
1551 /* Clear the copied virtual operands. We are not remapping them here
1552 but are going to recreate them from scratch. */
1553 if (gimple_has_mem_ops (copy
))
1555 gimple_set_vdef (copy
, NULL_TREE
);
1556 gimple_set_vuse (copy
, NULL_TREE
);
1563 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1567 copy_bb (copy_body_data
*id
, basic_block bb
, int frequency_scale
,
1568 gcov_type count_scale
)
1570 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1571 basic_block copy_basic_block
;
1576 /* Search for previous copied basic block. */
1579 prev
= prev
->prev_bb
;
1581 /* create_basic_block() will append every new block to
1582 basic_block_info automatically. */
1583 copy_basic_block
= create_basic_block (NULL
, (void *) 0,
1584 (basic_block
) prev
->aux
);
1585 copy_basic_block
->count
= apply_scale (bb
->count
, count_scale
);
1587 /* We are going to rebuild frequencies from scratch. These values
1588 have just small importance to drive canonicalize_loop_headers. */
1589 freq
= apply_scale ((gcov_type
)bb
->frequency
, frequency_scale
);
1591 /* We recompute frequencies after inlining, so this is quite safe. */
1592 if (freq
> BB_FREQ_MAX
)
1594 copy_basic_block
->frequency
= freq
;
1596 copy_gsi
= gsi_start_bb (copy_basic_block
);
1598 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1600 gimple stmt
= gsi_stmt (gsi
);
1601 gimple orig_stmt
= stmt
;
1603 id
->regimplify
= false;
1604 stmt
= remap_gimple_stmt (stmt
, id
);
1605 if (gimple_nop_p (stmt
))
1608 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
, orig_stmt
);
1611 /* With return slot optimization we can end up with
1612 non-gimple (foo *)&this->m, fix that here. */
1613 if (is_gimple_assign (stmt
)
1614 && gimple_assign_rhs_code (stmt
) == NOP_EXPR
1615 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1618 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1619 gimple_assign_rhs1 (stmt
),
1621 GSI_CONTINUE_LINKING
);
1622 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1623 id
->regimplify
= false;
1626 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1629 gimple_regimplify_operands (stmt
, &seq_gsi
);
1631 /* If copy_basic_block has been empty at the start of this iteration,
1632 call gsi_start_bb again to get at the newly added statements. */
1633 if (gsi_end_p (copy_gsi
))
1634 copy_gsi
= gsi_start_bb (copy_basic_block
);
1636 gsi_next (©_gsi
);
1638 /* Process the new statement. The call to gimple_regimplify_operands
1639 possibly turned the statement into multiple statements, we
1640 need to process all of them. */
1645 stmt
= gsi_stmt (copy_gsi
);
1646 if (is_gimple_call (stmt
)
1647 && gimple_call_va_arg_pack_p (stmt
)
1650 /* __builtin_va_arg_pack () should be replaced by
1651 all arguments corresponding to ... in the caller. */
1655 size_t nargs
= gimple_call_num_args (id
->gimple_call
);
1658 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1661 /* Create the new array of arguments. */
1662 n
= nargs
+ gimple_call_num_args (stmt
);
1663 argarray
.create (n
);
1664 argarray
.safe_grow_cleared (n
);
1666 /* Copy all the arguments before '...' */
1667 memcpy (argarray
.address (),
1668 gimple_call_arg_ptr (stmt
, 0),
1669 gimple_call_num_args (stmt
) * sizeof (tree
));
1671 /* Append the arguments passed in '...' */
1672 memcpy (argarray
.address () + gimple_call_num_args (stmt
),
1673 gimple_call_arg_ptr (id
->gimple_call
, 0)
1674 + (gimple_call_num_args (id
->gimple_call
) - nargs
),
1675 nargs
* sizeof (tree
));
1677 new_call
= gimple_build_call_vec (gimple_call_fn (stmt
),
1680 argarray
.release ();
1682 /* Copy all GIMPLE_CALL flags, location and block, except
1683 GF_CALL_VA_ARG_PACK. */
1684 gimple_call_copy_flags (new_call
, stmt
);
1685 gimple_call_set_va_arg_pack (new_call
, false);
1686 gimple_set_location (new_call
, gimple_location (stmt
));
1687 gimple_set_block (new_call
, gimple_block (stmt
));
1688 gimple_call_set_lhs (new_call
, gimple_call_lhs (stmt
));
1690 gsi_replace (©_gsi
, new_call
, false);
1693 else if (is_gimple_call (stmt
)
1695 && (decl
= gimple_call_fndecl (stmt
))
1696 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
1697 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_VA_ARG_PACK_LEN
)
1699 /* __builtin_va_arg_pack_len () should be replaced by
1700 the number of anonymous arguments. */
1701 size_t nargs
= gimple_call_num_args (id
->gimple_call
);
1705 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1708 count
= build_int_cst (integer_type_node
, nargs
);
1709 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
), count
);
1710 gsi_replace (©_gsi
, new_stmt
, false);
1714 /* Statements produced by inlining can be unfolded, especially
1715 when we constant propagated some operands. We can't fold
1716 them right now for two reasons:
1717 1) folding require SSA_NAME_DEF_STMTs to be correct
1718 2) we can't change function calls to builtins.
1719 So we just mark statement for later folding. We mark
1720 all new statements, instead just statements that has changed
1721 by some nontrivial substitution so even statements made
1722 foldable indirectly are updated. If this turns out to be
1723 expensive, copy_body can be told to watch for nontrivial
1725 if (id
->statements_to_fold
)
1726 pointer_set_insert (id
->statements_to_fold
, stmt
);
1728 /* We're duplicating a CALL_EXPR. Find any corresponding
1729 callgraph edges and update or duplicate them. */
1730 if (is_gimple_call (stmt
))
1732 struct cgraph_edge
*edge
;
1735 switch (id
->transform_call_graph_edges
)
1737 case CB_CGE_DUPLICATE
:
1738 edge
= cgraph_edge (id
->src_node
, orig_stmt
);
1741 int edge_freq
= edge
->frequency
;
1743 struct cgraph_edge
*old_edge
= edge
;
1744 edge
= cgraph_clone_edge (edge
, id
->dst_node
, stmt
,
1746 REG_BR_PROB_BASE
, CGRAPH_FREQ_BASE
,
1748 /* We could also just rescale the frequency, but
1749 doing so would introduce roundoff errors and make
1750 verifier unhappy. */
1751 new_freq
= compute_call_stmt_bb_frequency (id
->dst_node
->decl
,
1754 /* Speculative calls consist of two edges - direct and indirect.
1755 Duplicate the whole thing and distribute frequencies accordingly. */
1756 if (edge
->speculative
)
1758 struct cgraph_edge
*direct
, *indirect
;
1759 struct ipa_ref
*ref
;
1761 gcc_assert (!edge
->indirect_unknown_callee
);
1762 cgraph_speculative_call_info (old_edge
, direct
, indirect
, ref
);
1763 indirect
= cgraph_clone_edge (indirect
, id
->dst_node
, stmt
,
1765 REG_BR_PROB_BASE
, CGRAPH_FREQ_BASE
,
1767 if (old_edge
->frequency
+ indirect
->frequency
)
1769 edge
->frequency
= MIN (RDIV ((gcov_type
)new_freq
* old_edge
->frequency
,
1770 (old_edge
->frequency
+ indirect
->frequency
)),
1772 indirect
->frequency
= MIN (RDIV ((gcov_type
)new_freq
* indirect
->frequency
,
1773 (old_edge
->frequency
+ indirect
->frequency
)),
1776 ipa_clone_ref (ref
, id
->dst_node
, stmt
);
1780 edge
->frequency
= new_freq
;
1782 && profile_status_for_function (cfun
) != PROFILE_ABSENT
1783 && (edge_freq
> edge
->frequency
+ 10
1784 || edge_freq
< edge
->frequency
- 10))
1786 fprintf (dump_file
, "Edge frequency estimated by "
1787 "cgraph %i diverge from inliner's estimate %i\n",
1791 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1794 copy_basic_block
->frequency
);
1800 case CB_CGE_MOVE_CLONES
:
1801 cgraph_set_call_stmt_including_clones (id
->dst_node
,
1803 edge
= cgraph_edge (id
->dst_node
, stmt
);
1807 edge
= cgraph_edge (id
->dst_node
, orig_stmt
);
1809 cgraph_set_call_stmt (edge
, stmt
);
1816 /* Constant propagation on argument done during inlining
1817 may create new direct call. Produce an edge for it. */
1819 || (edge
->indirect_inlining_edge
1820 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
1821 && id
->dst_node
->definition
1822 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
1824 struct cgraph_node
*dest
= cgraph_get_node (fn
);
1826 /* We have missing edge in the callgraph. This can happen
1827 when previous inlining turned an indirect call into a
1828 direct call by constant propagating arguments or we are
1829 producing dead clone (for further cloning). In all
1830 other cases we hit a bug (incorrect node sharing is the
1831 most common reason for missing edges). */
1832 gcc_assert (!dest
->definition
1833 || dest
->address_taken
1834 || !id
->src_node
->definition
1835 || !id
->dst_node
->definition
);
1836 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
1837 cgraph_create_edge_including_clones
1838 (id
->dst_node
, dest
, orig_stmt
, stmt
, bb
->count
,
1839 compute_call_stmt_bb_frequency (id
->dst_node
->decl
,
1841 CIF_ORIGINALLY_INDIRECT_CALL
);
1843 cgraph_create_edge (id
->dst_node
, dest
, stmt
,
1845 compute_call_stmt_bb_frequency
1846 (id
->dst_node
->decl
,
1847 copy_basic_block
))->inline_failed
1848 = CIF_ORIGINALLY_INDIRECT_CALL
;
1851 fprintf (dump_file
, "Created new direct edge to %s\n",
1852 cgraph_node_name (dest
));
1856 flags
= gimple_call_flags (stmt
);
1857 if (flags
& ECF_MAY_BE_ALLOCA
)
1858 cfun
->calls_alloca
= true;
1859 if (flags
& ECF_RETURNS_TWICE
)
1860 cfun
->calls_setjmp
= true;
1863 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
1864 id
->eh_map
, id
->eh_lp_nr
);
1866 if (gimple_in_ssa_p (cfun
) && !is_gimple_debug (stmt
))
1871 FOR_EACH_SSA_TREE_OPERAND (def
, stmt
, i
, SSA_OP_DEF
)
1872 if (TREE_CODE (def
) == SSA_NAME
)
1873 SSA_NAME_DEF_STMT (def
) = stmt
;
1876 gsi_next (©_gsi
);
1878 while (!gsi_end_p (copy_gsi
));
1880 copy_gsi
= gsi_last_bb (copy_basic_block
);
1883 return copy_basic_block
;
1886 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1887 form is quite easy, since dominator relationship for old basic blocks does
1890 There is however exception where inlining might change dominator relation
1891 across EH edges from basic block within inlined functions destinating
1892 to landing pads in function we inline into.
1894 The function fills in PHI_RESULTs of such PHI nodes if they refer
1895 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1896 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1897 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1898 set, and this means that there will be no overlapping live ranges
1899 for the underlying symbol.
1901 This might change in future if we allow redirecting of EH edges and
1902 we might want to change way build CFG pre-inlining to include
1903 all the possible edges then. */
1905 update_ssa_across_abnormal_edges (basic_block bb
, basic_block ret_bb
,
1906 bool can_throw
, bool nonlocal_goto
)
1911 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1913 || ((basic_block
)e
->dest
->aux
)->index
== ENTRY_BLOCK
)
1916 gimple_stmt_iterator si
;
1919 gcc_assert (e
->flags
& EDGE_EH
);
1922 gcc_assert (!(e
->flags
& EDGE_EH
));
1924 for (si
= gsi_start_phis (e
->dest
); !gsi_end_p (si
); gsi_next (&si
))
1928 phi
= gsi_stmt (si
);
1930 /* For abnormal goto/call edges the receiver can be the
1931 ENTRY_BLOCK. Do not assert this cannot happen. */
1933 gcc_assert ((e
->flags
& EDGE_EH
)
1934 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)));
1936 re
= find_edge (ret_bb
, e
->dest
);
1937 gcc_checking_assert (re
);
1938 gcc_assert ((re
->flags
& (EDGE_EH
| EDGE_ABNORMAL
))
1939 == (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
)));
1941 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
1942 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, re
)));
1948 /* Copy edges from BB into its copy constructed earlier, scale profile
1949 accordingly. Edges will be taken care of later. Assume aux
1950 pointers to point to the copies of each BB. Return true if any
1951 debug stmts are left after a statement that must end the basic block. */
1954 copy_edges_for_bb (basic_block bb
, gcov_type count_scale
, basic_block ret_bb
,
1955 bool can_make_abnormal_goto
)
1957 basic_block new_bb
= (basic_block
) bb
->aux
;
1960 gimple_stmt_iterator si
;
1962 bool need_debug_cleanup
= false;
1964 /* Use the indices from the original blocks to create edges for the
1966 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
1967 if (!(old_edge
->flags
& EDGE_EH
))
1971 flags
= old_edge
->flags
;
1973 /* Return edges do get a FALLTHRU flag when the get inlined. */
1974 if (old_edge
->dest
->index
== EXIT_BLOCK
&& !old_edge
->flags
1975 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR
)
1976 flags
|= EDGE_FALLTHRU
;
1977 new_edge
= make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
1978 new_edge
->count
= apply_scale (old_edge
->count
, count_scale
);
1979 new_edge
->probability
= old_edge
->probability
;
1982 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
1985 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
1988 bool can_throw
, nonlocal_goto
;
1990 copy_stmt
= gsi_stmt (si
);
1991 if (!is_gimple_debug (copy_stmt
))
1992 update_stmt (copy_stmt
);
1994 /* Do this before the possible split_block. */
1997 /* If this tree could throw an exception, there are two
1998 cases where we need to add abnormal edge(s): the
1999 tree wasn't in a region and there is a "current
2000 region" in the caller; or the original tree had
2001 EH edges. In both cases split the block after the tree,
2002 and add abnormal edge(s) as needed; we need both
2003 those from the callee and the caller.
2004 We check whether the copy can throw, because the const
2005 propagation can change an INDIRECT_REF which throws
2006 into a COMPONENT_REF which doesn't. If the copy
2007 can throw, the original could also throw. */
2008 can_throw
= stmt_can_throw_internal (copy_stmt
);
2009 nonlocal_goto
= stmt_can_make_abnormal_goto (copy_stmt
);
2011 if (can_throw
|| nonlocal_goto
)
2013 if (!gsi_end_p (si
))
2015 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2018 need_debug_cleanup
= true;
2020 if (!gsi_end_p (si
))
2021 /* Note that bb's predecessor edges aren't necessarily
2022 right at this point; split_block doesn't care. */
2024 edge e
= split_block (new_bb
, copy_stmt
);
2027 new_bb
->aux
= e
->src
->aux
;
2028 si
= gsi_start_bb (new_bb
);
2032 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2033 make_eh_dispatch_edges (copy_stmt
);
2035 make_eh_edges (copy_stmt
);
2037 /* If the call we inline cannot make abnormal goto do not add
2038 additional abnormal edges but only retain those already present
2039 in the original function body. */
2040 nonlocal_goto
&= can_make_abnormal_goto
;
2042 make_abnormal_goto_edges (gimple_bb (copy_stmt
), true);
2044 if ((can_throw
|| nonlocal_goto
)
2045 && gimple_in_ssa_p (cfun
))
2046 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2047 can_throw
, nonlocal_goto
);
2049 return need_debug_cleanup
;
2052 /* Copy the PHIs. All blocks and edges are copied, some blocks
2053 was possibly split and new outgoing EH edges inserted.
2054 BB points to the block of original function and AUX pointers links
2055 the original and newly copied blocks. */
2058 copy_phis_for_bb (basic_block bb
, copy_body_data
*id
)
2060 basic_block
const new_bb
= (basic_block
) bb
->aux
;
2063 gimple_stmt_iterator si
;
2065 bool inserted
= false;
2067 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
2072 phi
= gsi_stmt (si
);
2073 res
= PHI_RESULT (phi
);
2075 if (!virtual_operand_p (res
))
2077 walk_tree (&new_res
, copy_tree_body_r
, id
, NULL
);
2078 new_phi
= create_phi_node (new_res
, new_bb
);
2079 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2081 edge old_edge
= find_edge ((basic_block
) new_edge
->src
->aux
, bb
);
2087 /* When doing partial cloning, we allow PHIs on the entry block
2088 as long as all the arguments are the same. Find any input
2089 edge to see argument to copy. */
2091 FOR_EACH_EDGE (old_edge
, ei2
, bb
->preds
)
2092 if (!old_edge
->src
->aux
)
2095 arg
= PHI_ARG_DEF_FROM_EDGE (phi
, old_edge
);
2097 walk_tree (&new_arg
, copy_tree_body_r
, id
, NULL
);
2098 gcc_assert (new_arg
);
2099 /* With return slot optimization we can end up with
2100 non-gimple (foo *)&this->m, fix that here. */
2101 if (TREE_CODE (new_arg
) != SSA_NAME
2102 && TREE_CODE (new_arg
) != FUNCTION_DECL
2103 && !is_gimple_val (new_arg
))
2105 gimple_seq stmts
= NULL
;
2106 new_arg
= force_gimple_operand (new_arg
, &stmts
, true, NULL
);
2107 gsi_insert_seq_on_edge (new_edge
, stmts
);
2110 locus
= gimple_phi_arg_location_from_edge (phi
, old_edge
);
2111 if (LOCATION_BLOCK (locus
))
2114 n
= (tree
*) pointer_map_contains (id
->decl_map
,
2115 LOCATION_BLOCK (locus
));
2118 locus
= COMBINE_LOCATION_DATA (line_table
, locus
, *n
);
2120 locus
= LOCATION_LOCUS (locus
);
2123 locus
= LOCATION_LOCUS (locus
);
2125 add_phi_arg (new_phi
, new_arg
, new_edge
, locus
);
2130 /* Commit the delayed edge insertions. */
2132 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2133 gsi_commit_one_edge_insert (new_edge
, NULL
);
2137 /* Wrapper for remap_decl so it can be used as a callback. */
2140 remap_decl_1 (tree decl
, void *data
)
2142 return remap_decl (decl
, (copy_body_data
*) data
);
2145 /* Build struct function and associated datastructures for the new clone
2146 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2147 the cfun to the function of new_fndecl (and current_function_decl too). */
2150 initialize_cfun (tree new_fndecl
, tree callee_fndecl
, gcov_type count
)
2152 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2153 gcov_type count_scale
;
2155 if (!DECL_ARGUMENTS (new_fndecl
))
2156 DECL_ARGUMENTS (new_fndecl
) = DECL_ARGUMENTS (callee_fndecl
);
2157 if (!DECL_RESULT (new_fndecl
))
2158 DECL_RESULT (new_fndecl
) = DECL_RESULT (callee_fndecl
);
2160 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
)
2162 = GCOV_COMPUTE_SCALE (count
,
2163 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
);
2165 count_scale
= REG_BR_PROB_BASE
;
2167 /* Register specific tree functions. */
2168 gimple_register_cfg_hooks ();
2170 /* Get clean struct function. */
2171 push_struct_function (new_fndecl
);
2173 /* We will rebuild these, so just sanity check that they are empty. */
2174 gcc_assert (VALUE_HISTOGRAMS (cfun
) == NULL
);
2175 gcc_assert (cfun
->local_decls
== NULL
);
2176 gcc_assert (cfun
->cfg
== NULL
);
2177 gcc_assert (cfun
->decl
== new_fndecl
);
2179 /* Copy items we preserve during cloning. */
2180 cfun
->static_chain_decl
= src_cfun
->static_chain_decl
;
2181 cfun
->nonlocal_goto_save_area
= src_cfun
->nonlocal_goto_save_area
;
2182 cfun
->function_end_locus
= src_cfun
->function_end_locus
;
2183 cfun
->curr_properties
= src_cfun
->curr_properties
;
2184 cfun
->last_verified
= src_cfun
->last_verified
;
2185 cfun
->va_list_gpr_size
= src_cfun
->va_list_gpr_size
;
2186 cfun
->va_list_fpr_size
= src_cfun
->va_list_fpr_size
;
2187 cfun
->has_nonlocal_label
= src_cfun
->has_nonlocal_label
;
2188 cfun
->stdarg
= src_cfun
->stdarg
;
2189 cfun
->after_inlining
= src_cfun
->after_inlining
;
2190 cfun
->can_throw_non_call_exceptions
2191 = src_cfun
->can_throw_non_call_exceptions
;
2192 cfun
->can_delete_dead_exceptions
= src_cfun
->can_delete_dead_exceptions
;
2193 cfun
->returns_struct
= src_cfun
->returns_struct
;
2194 cfun
->returns_pcc_struct
= src_cfun
->returns_pcc_struct
;
2196 init_empty_tree_cfg ();
2198 profile_status_for_function (cfun
) = profile_status_for_function (src_cfun
);
2199 ENTRY_BLOCK_PTR
->count
=
2200 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
* count_scale
/
2202 ENTRY_BLOCK_PTR
->frequency
2203 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->frequency
;
2204 EXIT_BLOCK_PTR
->count
=
2205 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->count
* count_scale
/
2207 EXIT_BLOCK_PTR
->frequency
=
2208 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun
)->frequency
;
2210 init_eh_for_function ();
2212 if (src_cfun
->gimple_df
)
2214 init_tree_ssa (cfun
);
2215 cfun
->gimple_df
->in_ssa_p
= true;
2216 init_ssa_operands (cfun
);
/* Helper function for copy_cfg_body.  Move debug stmts from the end
   of NEW_BB to the beginning of successor basic blocks when needed.  If the
   successor has multiple predecessors, reset them, otherwise keep
   their value.  */

static void
maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
{
  edge_iterator ei;
  edge e;

  /* Nothing to do unless the last non-debug stmt can transfer control
     out of NEW_BB mid-block (internal throw or abnormal goto); only
     then can debug stmts be stranded after the control transfer.  */
  gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
  if (gsi_end_p (si)
      || gsi_one_before_end_p (si)
      || !(stmt_can_throw_internal (gsi_stmt (si))
	   || stmt_can_make_abnormal_goto (gsi_stmt (si))))
    return;

  FOR_EACH_EDGE (e, ei, new_bb->succs)
    {
      gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
      gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
      /* Walk the trailing debug stmts backwards, transplanting each one
	 to the start of the successor E->dest.  */
      while (is_gimple_debug (gsi_stmt (ssi)))
	{
	  gimple stmt = gsi_stmt (ssi), new_stmt;
	  tree var;
	  tree value;

	  /* For the last edge move the debug stmts instead of copying
	     them.  */
	  if (ei_one_before_end_p (ei))
	    {
	      si = ssi;
	      gsi_prev (&ssi);
	      /* With multiple predecessors the bind's value is no longer
		 unambiguous, so reset it.  */
	      if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
		gimple_debug_bind_reset_value (stmt);
	      gsi_remove (&si, false);
	      gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
	      continue;
	    }

	  if (gimple_debug_bind_p (stmt))
	    {
	      var = gimple_debug_bind_get_var (stmt);
	      if (single_pred_p (e->dest))
		{
		  value = gimple_debug_bind_get_value (stmt);
		  /* Unshare: the same VALUE tree must not be referenced
		     from two stmts at once.  */
		  value = unshare_expr (value);
		}
	      else
		value = NULL_TREE;
	      new_stmt = gimple_build_debug_bind (var, value, stmt);
	    }
	  else if (gimple_debug_source_bind_p (stmt))
	    {
	      var = gimple_debug_source_bind_get_var (stmt);
	      value = gimple_debug_source_bind_get_value (stmt);
	      new_stmt = gimple_build_debug_source_bind (var, value, stmt);
	    }
	  else
	    gcc_unreachable ();
	  gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
	  /* Record the copy so its operands get remapped later.  */
	  id->debug_stmts.safe_push (new_stmt);
	  gsi_prev (&ssi);
	}
    }
}
/* Make a copy of the sub-loops of SRC_PARENT and place them
   as siblings of DEST_PARENT.  */

static void
copy_loops (copy_body_data *id,
	    struct loop *dest_parent, struct loop *src_parent)
{
  struct loop *src_loop = src_parent->inner;
  while (src_loop)
    {
      /* Only duplicate loops whose header block is being copied (or all
	 loops when no block subset was requested).  */
      if (!id->blocks_to_copy
	  || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
	{
	  struct loop *dest_loop = alloc_loop ();

	  /* Assign the new loop its header and latch and associate
	     those with the new loop.  The ->aux fields hold the mapping
	     from original blocks to their copies.  */
	  if (src_loop->header != NULL)
	    {
	      dest_loop->header = (basic_block)src_loop->header->aux;
	      dest_loop->header->loop_father = dest_loop;
	    }
	  if (src_loop->latch != NULL)
	    {
	      dest_loop->latch = (basic_block)src_loop->latch->aux;
	      dest_loop->latch->loop_father = dest_loop;
	    }

	  /* Copy loop meta-data.  */
	  copy_loop_info (src_loop, dest_loop);

	  /* Finally place it into the loop array and the loop tree.  */
	  place_new_loop (cfun, dest_loop);
	  flow_loop_tree_node_add (dest_parent, dest_loop);

	  /* The simduid decl belongs to the source function; remap it
	     into the destination.  */
	  if (src_loop->simduid)
	    {
	      dest_loop->simduid = remap_decl (src_loop->simduid, id);
	      cfun->has_simduid_loops = true;
	    }
	  if (src_loop->force_vect)
	    {
	      dest_loop->force_vect = true;
	      cfun->has_force_vect_loops = true;
	    }

	  /* Recurse into the sub-loops of SRC_LOOP.  */
	  copy_loops (id, dest_loop, src_loop);
	}
      src_loop = src_loop->next;
    }
}
2341 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2344 redirect_all_calls (copy_body_data
* id
, basic_block bb
)
2346 gimple_stmt_iterator si
;
2347 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
2349 if (is_gimple_call (gsi_stmt (si
)))
2351 struct cgraph_edge
*edge
= cgraph_edge (id
->dst_node
, gsi_stmt (si
));
2353 cgraph_redirect_edge_call_stmt_to_callee (edge
);
/* Convert estimated frequencies into counts for NODE, scaling COUNT
   with each bb's frequency. Used when NODE has a 0-weight entry
   but we are about to inline it into a non-zero count call bb.
   See the comments for handle_missing_profiles() in predict.c for
   when this can happen for COMDATs.  */

static void
freqs_to_counts (struct cgraph_node *node, gcov_type count)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  FOR_ALL_BB_FN (bb, fn)
    {
      /* bb->count = COUNT * (bb->frequency / BB_FREQ_MAX), computed in
	 fixed point via GCOV_COMPUTE_SCALE/apply_scale.  */
      bb->count = apply_scale (count,
                               GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
      /* Derive edge counts from the freshly assigned source-block count
	 and the edge's branch probability.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_probability (e->src->count, e->probability);
    }
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
	       basic_block entry_block_map, basic_block exit_block_map,
	       basic_block new_entry)
{
  tree callee_fndecl = id->src_fn;
  /* Original cfun for the callee, doesn't change.  */
  struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  struct function *cfun_to_copy;
  basic_block bb;
  tree new_fndecl = NULL;
  bool need_debug_cleanup = false;
  gcov_type count_scale;
  int last;
  int incoming_frequency = 0;
  gcov_type incoming_count = 0;

  /* This can happen for COMDAT routines that end up with 0 counts
     despite being called (see the comments for handle_missing_profiles()
     in predict.c as to why).  Apply counts to the blocks in the callee
     before inlining, using the guessed edge frequencies, so that we don't
     end up with a 0-count inline body which can confuse downstream
     optimizations such as function splitting.  */
  if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count)
    {
      /* Apply the larger of the call bb count and the total incoming
         call edge count to the callee.  */
      gcov_type in_count = 0;
      struct cgraph_edge *in_edge;
      for (in_edge = id->src_node->callers; in_edge;
           in_edge = in_edge->next_caller)
        in_count += in_edge->count;
      freqs_to_counts (id->src_node, count > in_count ? count : in_count);
    }

  /* Scale factor from callee entry count to COUNT, in REG_BR_PROB_BASE
     fixed point.  */
  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
    count_scale
        = GCOV_COMPUTE_SCALE (count,
                              ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
  else
    count_scale = REG_BR_PROB_BASE;

  /* Register specific tree functions.  */
  gimple_register_cfg_hooks ();

  /* If we are inlining just region of the function, make sure to connect new entry
     to ENTRY_BLOCK_PTR.   Since new entry can be part of loop, we must compute
     frequency and probability of ENTRY_BLOCK_PTR based on the frequencies and
     probabilities of edges incoming from nonduplicated region.  */
  if (new_entry)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, new_entry->preds)
	if (!e->src->aux)
	  {
	    incoming_frequency += EDGE_FREQUENCY (e);
	    incoming_count += e->count;
	  }
      incoming_count = apply_scale (incoming_count, count_scale);
      incoming_frequency
	= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
      ENTRY_BLOCK_PTR->count = incoming_count;
      ENTRY_BLOCK_PTR->frequency = incoming_frequency;
    }

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);

  /* ->aux fields carry the block mapping in both directions while the
     copy is in progress; they are cleared again below.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
				       remap_decl_1, id);

  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
      {
	basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
	bb->aux = new_bb;
	new_bb->aux = bb;
	new_bb->loop_father = entry_block_map->loop_father;
      }

  /* Remember the high-water mark so blocks created by later edge
     insertion can be recognized and cleaned up.  */
  last = last_basic_block;

  /* Now that we've duplicated the blocks, duplicate their edges.  */
  bool can_make_abormal_goto
    = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (!id->blocks_to_copy
	|| (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
					       can_make_abormal_goto);

  if (new_entry)
    {
      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
			  EDGE_FALLTHRU);
      e->probability = REG_BR_PROB_BASE;
      e->count = incoming_count;
    }

  /* Duplicate the loop tree, if available and wanted.  */
  if (loops_for_fn (src_cfun) != NULL
      && current_loops != NULL)
    {
      copy_loops (id, entry_block_map->loop_father,
		  get_loop (src_cfun, 0));
      /* Defer to cfgcleanup to update loop-father fields of basic-blocks.  */
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* If the loop tree in the source function needed fixup, mark the
     destination loop tree for fixup, too.  */
  if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
    loops_state_set (LOOPS_NEED_FIXUP);

  if (gimple_in_ssa_p (cfun))
    FOR_ALL_BB_FN (bb, cfun_to_copy)
      if (!id->blocks_to_copy
	  || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
	copy_phis_for_bb (bb, id);

  FOR_ALL_BB_FN (bb, cfun_to_copy)
    if (bb->aux)
      {
	if (need_debug_cleanup
	    && bb->index != ENTRY_BLOCK
	    && bb->index != EXIT_BLOCK)
	  maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
	/* Update call edge destinations.  This can not be done before loop
	   info is updated, because we may split basic blocks.  */
	if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	  redirect_all_calls (id, (basic_block)bb->aux);
	((basic_block)bb->aux)->aux = NULL;
	bb->aux = NULL;
      }

  /* Zero out AUX fields of newly created block during EH edge
     insertion.  */
  for (; last < last_basic_block; last++)
    {
      if (need_debug_cleanup)
	maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
      BASIC_BLOCK (last)->aux = NULL;
      /* Update call edge destinations.  This can not be done before loop
	 info is updated, because we may split basic blocks.  */
      if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
	redirect_all_calls (id, BASIC_BLOCK (last));
    }
  entry_block_map->aux = NULL;
  exit_block_map->aux = NULL;

  if (id->eh_map)
    {
      pointer_map_destroy (id->eh_map);
      id->eh_map = NULL;
    }

  return new_fndecl;
}
/* Copy the debug STMT using ID.  We deal with these statements in a
   special way: if any variable in their VALUE expression wasn't
   remapped yet, we won't remap it, because that would get decl uids
   out of sync, causing codegen differences between -g and -g0.  If
   this arises, we drop the VALUE expression altogether.  */

static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
{
  tree t, *n;
  struct walk_stmt_info wi;

  if (gimple_block (stmt))
    {
      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
      gimple_set_block (stmt, n ? *n : id->block);
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;

  /* Signal remap_gimple_op_r (via this global flag) that it must not
     create new decl mappings; it records failure by going negative.  */
  processing_debug_stmt = 1;

  if (gimple_debug_source_bind_p (stmt))
    t = gimple_debug_source_bind_get_var (stmt);
  else
    t = gimple_debug_bind_get_var (stmt);

  if (TREE_CODE (t) == PARM_DECL && id->debug_map
      && (n = (tree *) pointer_map_contains (id->debug_map, t)))
    {
      gcc_assert (TREE_CODE (*n) == VAR_DECL);
      t = *n;
    }
  else if (TREE_CODE (t) == VAR_DECL
	   && !is_global_var (t)
	   && !pointer_map_contains (id->decl_map, t))
    /* T is a non-localized variable.  */;
  else
    walk_tree (&t, remap_gimple_op_r, &wi, NULL);

  if (gimple_debug_bind_p (stmt))
    {
      gimple_debug_bind_set_var (stmt, t);

      if (gimple_debug_bind_has_value_p (stmt))
	walk_tree (gimple_debug_bind_get_value_ptr (stmt),
		   remap_gimple_op_r, &wi, NULL);

      /* Punt if any decl couldn't be remapped.  */
      if (processing_debug_stmt < 0)
	gimple_debug_bind_reset_value (stmt);
    }
  else if (gimple_debug_source_bind_p (stmt))
    {
      gimple_debug_source_bind_set_var (stmt, t);
      walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
		 remap_gimple_op_r, &wi, NULL);
      /* When inlining and source bind refers to one of the optimized
	 away parameters, change the source bind into normal debug bind
	 referring to the corresponding DEBUG_EXPR_DECL that should have
	 been bound before the call stmt.  */
      t = gimple_debug_source_bind_get_value (stmt);
      if (t != NULL_TREE
	  && TREE_CODE (t) == PARM_DECL
	  && id->gimple_call)
	{
	  vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
	  unsigned int i;
	  if (debug_args != NULL)
	    {
	      /* debug_args is a flat vector of (origin decl,
		 DEBUG_EXPR_DECL) pairs, hence the stride of 2.  */
	      for (i = 0; i < vec_safe_length (*debug_args); i += 2)
		if ((**debug_args)[i] == DECL_ORIGIN (t)
		    && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
		  {
		    t = (**debug_args)[i + 1];
		    /* Demote the source bind to a plain debug bind in
		       place.  */
		    stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
		    gimple_debug_bind_set_value (stmt, t);
		    break;
		  }
	    }
	}
    }

  processing_debug_stmt = 0;

  update_stmt (stmt);
}
2645 /* Process deferred debug stmts. In order to give values better odds
2646 of being successfully remapped, we delay the processing of debug
2647 stmts until all other stmts that might require remapping are
2651 copy_debug_stmts (copy_body_data
*id
)
2656 if (!id
->debug_stmts
.exists ())
2659 FOR_EACH_VEC_ELT (id
->debug_stmts
, i
, stmt
)
2660 copy_debug_stmt (stmt
, id
);
2662 id
->debug_stmts
.release ();
2665 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2666 another function. */
2669 copy_tree_body (copy_body_data
*id
)
2671 tree fndecl
= id
->src_fn
;
2672 tree body
= DECL_SAVED_TREE (fndecl
);
2674 walk_tree (&body
, copy_tree_body_r
, id
, NULL
);
2679 /* Make a copy of the body of FN so that it can be inserted inline in
2680 another function. */
2683 copy_body (copy_body_data
*id
, gcov_type count
, int frequency_scale
,
2684 basic_block entry_block_map
, basic_block exit_block_map
,
2685 basic_block new_entry
)
2687 tree fndecl
= id
->src_fn
;
2690 /* If this body has a CFG, walk CFG and copy. */
2691 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl
)));
2692 body
= copy_cfg_body (id
, count
, frequency_scale
, entry_block_map
, exit_block_map
,
2694 copy_debug_stmts (id
);
2699 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2700 defined in function FN, or of a data member thereof. */
2703 self_inlining_addr_expr (tree value
, tree fn
)
2707 if (TREE_CODE (value
) != ADDR_EXPR
)
2710 var
= get_base_address (TREE_OPERAND (value
, 0));
2712 return var
&& auto_var_in_fn_p (var
, fn
);
/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
   lexical block and line number information from base_stmt, if given,
   or from the last stmt of the block otherwise.  */

static gimple
insert_init_debug_bind (copy_body_data *id,
			basic_block bb, tree var, tree value,
			gimple base_stmt)
{
  gimple note;
  gimple_stmt_iterator gsi;
  tree tracked_var;

  /* Debug binds are only meaningful for gimple/SSA bodies.  */
  if (!gimple_in_ssa_p (id->src_cfun))
    return NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return NULL;

  /* Punt when VAR is not something variable tracking can handle.  */
  tracked_var = target_for_debug_bind (var);
  if (!tracked_var)
    return NULL;

  if (bb)
    {
      gsi = gsi_last_bb (bb);
      if (!base_stmt && !gsi_end_p (gsi))
	base_stmt = gsi_stmt (gsi);
    }

  note = gimple_build_debug_bind (tracked_var, value, base_stmt);

  if (bb)
    {
      /* Append after the last stmt, or as the sole stmt of an empty
	 block.  */
      if (!gsi_end_p (gsi))
	gsi_insert_after (&gsi, note, GSI_SAME_STMT);
      else
	gsi_insert_before (&gsi, note, GSI_SAME_STMT);
    }

  return note;
}
/* Insert INIT_STMT at the end of BB, gimplifying its operands as
   needed.  INIT_STMT may be NULL (see below), in which case nothing is
   done.  Also emits a matching debug bind when debug stmts are
   enabled.  */

static void
insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
{
  /* If VAR represents a zero-sized variable, it's possible that the
     assignment statement may result in no gimple statements.  */
  if (init_stmt)
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);

      /* We can end up with init statements that store to a non-register
         from a rhs with a conversion.  Handle that here by forcing the
	 rhs into a temporary.  gimple_regimplify_operands is not
	 prepared to do this for us.  */
      if (!is_gimple_debug (init_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (init_stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
	  && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
	{
	  /* Rebuild the unary rhs as a tree, force it into a temporary,
	     and rewrite INIT_STMT to use that temporary.  */
	  tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
			     gimple_expr_type (init_stmt),
			     gimple_assign_rhs1 (init_stmt));
	  rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
					  GSI_NEW_STMT);
	  gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
	  gimple_assign_set_rhs1 (init_stmt, rhs);
	}
      gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
      gimple_regimplify_operands (init_stmt, &si);

      if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
	{
	  tree def = gimple_assign_lhs (init_stmt);
	  insert_init_debug_bind (id, bb, def, def, init_stmt);
	}
    }
}
/* Initialize parameter P with VALUE.  If needed, produce init statement
   at the end of BB.  When BB is NULL, we return init statement to be
   output later.  */

static gimple
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  gimple init_stmt = NULL;
  tree var;
  tree rhs = value;
  tree def = (gimple_in_ssa_p (cfun)
	      ? ssa_default_def (id->src_cfun, p) : NULL);

  if (value
      && value != error_mark_node
      && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (TREE_TYPE (p), value))
	rhs = fold_convert (TREE_TYPE (p), value);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
	     GIMPLE to the following passes.  */
	  if (!is_gimple_reg_type (TREE_TYPE (value))
	      || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
	    rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
	  else
	    rhs = build_zero_cst (TREE_TYPE (p));
	}
    }

  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_to_var (p, id);

  /* Declare this new variable.  */
  DECL_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* If the parameter is never assigned to, has no SSA_NAMEs created,
     we would not need to create a new variable here at all, if it
     weren't for debug info.  Still, we can just use the argument
     value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value)
      && !def)
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not big deal to prohibit constant propagation here as
	 we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && useless_type_conversion_p (TREE_TYPE (p),
					TREE_TYPE (value))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  insert_debug_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, value, NULL);
	}
    }

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var);

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* If there is no setup required and we are in SSA, take the easy route
     replacing all SSA names representing the function parameter by the
     SSA name passed to function.

     We need to construct map for the variable anyway as it might be used
     in different SSA names when parameter is set in function.

     Do replacement at -O0 for const arguments replaced by constant.
     This is important for builtin_constant_p and other construct requiring
     constant argument to be visible in inlined function body.  */
  if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
      && (optimize
	  || (TREE_READONLY (p)
	      && is_gimple_min_invariant (rhs)))
      && (TREE_CODE (rhs) == SSA_NAME
	  || is_gimple_min_invariant (rhs))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
    {
      insert_decl_map (id, def, rhs);
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* If the value of argument is never used, don't care about initializing
     it.  */
  if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
    {
      gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
      return insert_init_debug_bind (id, bb, var, rhs, NULL);
    }

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      if (rhs == error_mark_node)
	{
	  insert_decl_map (id, p, var);
	  return insert_init_debug_bind (id, bb, var, rhs, NULL);
	}

      STRIP_USELESS_TYPE_CONVERSION (rhs);

      /* If we are in SSA form properly remap the default definition
         or assign to a dummy SSA name if the parameter is unused and
	 we are not optimizing.  */
      if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
	{
	  if (def)
	    {
	      def = remap_ssa_name (def, id);
	      init_stmt = gimple_build_assign (def, rhs);
	      SSA_NAME_IS_DEFAULT_DEF (def) = 0;
	      set_ssa_default_def (cfun, var, NULL);
	    }
	  else if (!optimize)
	    {
	      def = make_ssa_name (var, NULL);
	      init_stmt = gimple_build_assign (def, rhs);
	    }
	}
      else
        init_stmt = gimple_build_assign (var, rhs);

      if (bb && init_stmt)
        insert_init_stmt (id, bb, init_stmt);
    }
  return init_stmt;
}
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple stmt,
			       tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      /* A K&R-style mismatch may leave more parms than call args; pass
	 NULL for the missing ones.  */
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
      if (varp
	  && TREE_CODE (*varp) == VAR_DECL)
	{
	  tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
		      ? ssa_default_def (id->src_cfun, p) : NULL);
	  tree var = *varp;
	  TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
	  /* Also remap the default definition if it was remapped
	     to the default definition of the parameter replacement
	     by the parameter setup.  */
	  if (def)
	    {
	      tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
	      if (defp
		  && TREE_CODE (*defp) == SSA_NAME
		  && SSA_NAME_VAR (*defp) == var)
		TREE_TYPE (*defp) = TREE_TYPE (var);
	    }
	}
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      /* The static chain is treated just like one more parameter.  */
      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null is place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
			 basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	{
	  tree return_slot_addr = build_fold_addr_expr (return_slot);
	  STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

	  /* We are going to construct *&return_slot and we can't do that
	     for variables believed to be not addressable.

	     FIXME: This check possibly can match, because values returned
	     via return slot optimization are not believed to have address
	     taken by alias analysis.  */
	  gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
	  var = return_slot_addr;
	}
      else
	{
	  var = return_slot;
	  gcc_assert (TREE_CODE (var) != SSA_NAME);
	  TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
	}
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	  && !DECL_GIMPLE_REG_P (result)
	  && DECL_P (var))
	DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
	use_it = false;
      else
	{
	  tree base_m = get_base_address (modify_dest);

	  /* If the base isn't a decl, then it's a pointer, and we don't
	     know where that's going to go.  */
	  if (!DECL_P (base_m))
	    use_it = false;
	  else if (is_global_var (base_m))
	    use_it = false;
	  else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
		   && !DECL_GIMPLE_REG_P (result)
		   && DECL_GIMPLE_REG_P (base_m))
	    use_it = false;
	  else if (!TREE_ADDRESSABLE (base_m))
	    use_it = true;
	}

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
	use = fold_convert (caller_type, var);
      else
	{
	  /* ???  For valid programs we should not end up here.
	     Still if we end up with truly mismatched types here, fall back
	     to using a MEM_REF to not leak invalid GIMPLE to the following
	     passes.  */
	  /* Prevent var from being written into SSA form.  */
	  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	      || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
	    DECL_GIMPLE_REG_P (var) = false;
	  else if (is_gimple_reg_type (TREE_TYPE (var)))
	    TREE_ADDRESSABLE (var) = true;
	  use = fold_build2 (MEM_REF, caller_type,
			     build_fold_addr_expr (var),
			     build_int_cst (ptr_type_node, 0));
	}
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
	 it's default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
	  && is_gimple_reg (result))
	{
	  temp = make_ssa_name (temp, NULL);
	  insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
	}
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  return use;
}
3219 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3220 to a local label. */
3223 has_label_address_in_static_1 (tree
*nodep
, int *walk_subtrees
, void *fnp
)
3226 tree fn
= (tree
) fnp
;
3228 if (TREE_CODE (node
) == LABEL_DECL
&& DECL_CONTEXT (node
) == fn
)
/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describng the reason for failure.  */

static const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree decl;
  unsigned ix;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
		  "because it receives a non-local goto");
      goto fail;
    }

  /* Reject functions that stash the address of a local label in a
     static variable; the copied label could not be remapped there.  */
  FOR_EACH_LOCAL_DECL (fun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
	&& TREE_STATIC (decl)
	&& !DECL_EXTERNAL (decl)
	&& DECL_INITIAL (decl)
	&& walk_tree_without_duplicates (&DECL_INITIAL (decl),
					 has_label_address_in_static_1,
					 fndecl))
      {
	reason = G_("function %q+F can never be copied because it saves "
		    "address of local label in a static variable");
	goto fail;
      }

 fail:
  /* Cache the verdict so subsequent queries are O(1).  */
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}
3285 static const char *inline_forbidden_reason
;
3287 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3288 iff a function can not be inlined. Also sets the reason why. */
3291 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3292 struct walk_stmt_info
*wip
)
3294 tree fn
= (tree
) wip
->info
;
3296 gimple stmt
= gsi_stmt (*gsi
);
3298 switch (gimple_code (stmt
))
3301 /* Refuse to inline alloca call unless user explicitly forced so as
3302 this may change program's memory overhead drastically when the
3303 function using alloca is called in loop. In GCC present in
3304 SPEC2000 inlining into schedule_block cause it to require 2GB of
3305 RAM instead of 256MB. Don't do so for alloca calls emitted for
3306 VLA objects as those can't cause unbounded growth (they're always
3307 wrapped inside stack_save/stack_restore regions. */
3308 if (gimple_alloca_call_p (stmt
)
3309 && !gimple_call_alloca_for_var_p (stmt
)
3310 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3312 inline_forbidden_reason
3313 = G_("function %q+F can never be inlined because it uses "
3314 "alloca (override using the always_inline attribute)");
3315 *handled_ops_p
= true;
3319 t
= gimple_call_fndecl (stmt
);
3323 /* We cannot inline functions that call setjmp. */
3324 if (setjmp_call_p (t
))
3326 inline_forbidden_reason
3327 = G_("function %q+F can never be inlined because it uses setjmp");
3328 *handled_ops_p
= true;
3332 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3333 switch (DECL_FUNCTION_CODE (t
))
3335 /* We cannot inline functions that take a variable number of
3337 case BUILT_IN_VA_START
:
3338 case BUILT_IN_NEXT_ARG
:
3339 case BUILT_IN_VA_END
:
3340 inline_forbidden_reason
3341 = G_("function %q+F can never be inlined because it "
3342 "uses variable argument lists");
3343 *handled_ops_p
= true;
3346 case BUILT_IN_LONGJMP
:
3347 /* We can't inline functions that call __builtin_longjmp at
3348 all. The non-local goto machinery really requires the
3349 destination be in a different function. If we allow the
3350 function calling __builtin_longjmp to be inlined into the
3351 function calling __builtin_setjmp, Things will Go Awry. */
3352 inline_forbidden_reason
3353 = G_("function %q+F can never be inlined because "
3354 "it uses setjmp-longjmp exception handling");
3355 *handled_ops_p
= true;
3358 case BUILT_IN_NONLOCAL_GOTO
:
3360 inline_forbidden_reason
3361 = G_("function %q+F can never be inlined because "
3362 "it uses non-local goto");
3363 *handled_ops_p
= true;
3366 case BUILT_IN_RETURN
:
3367 case BUILT_IN_APPLY_ARGS
:
3368 /* If a __builtin_apply_args caller would be inlined,
3369 it would be saving arguments of the function it has
3370 been inlined into. Similarly __builtin_return would
3371 return from the function the inline has been inlined into. */
3372 inline_forbidden_reason
3373 = G_("function %q+F can never be inlined because "
3374 "it uses __builtin_return or __builtin_apply_args");
3375 *handled_ops_p
= true;
3384 t
= gimple_goto_dest (stmt
);
3386 /* We will not inline a function which uses computed goto. The
3387 addresses of its local labels, which may be tucked into
3388 global storage, are of course not constant across
3389 instantiations, which causes unexpected behavior. */
3390 if (TREE_CODE (t
) != LABEL_DECL
)
3392 inline_forbidden_reason
3393 = G_("function %q+F can never be inlined "
3394 "because it contains a computed goto");
3395 *handled_ops_p
= true;
3404 *handled_ops_p
= false;
3408 /* Return true if FNDECL is a function that cannot be inlined into
3412 inline_forbidden_p (tree fndecl
)
3414 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3415 struct walk_stmt_info wi
;
3416 struct pointer_set_t
*visited_nodes
;
3418 bool forbidden_p
= false;
3420 /* First check for shared reasons not to copy the code. */
3421 inline_forbidden_reason
= copy_forbidden (fun
, fndecl
);
3422 if (inline_forbidden_reason
!= NULL
)
3425 /* Next, walk the statements of the function looking for
3426 constraucts we can't handle, or are non-optimal for inlining. */
3427 visited_nodes
= pointer_set_create ();
3428 memset (&wi
, 0, sizeof (wi
));
3429 wi
.info
= (void *) fndecl
;
3430 wi
.pset
= visited_nodes
;
3432 FOR_EACH_BB_FN (bb
, fun
)
3435 gimple_seq seq
= bb_seq (bb
);
3436 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3437 forbidden_p
= (ret
!= NULL
);
3442 pointer_set_destroy (visited_nodes
);
3446 /* Return false if the function FNDECL cannot be inlined on account of its
3447 attributes, true otherwise. */
3449 function_attribute_inlinable_p (const_tree fndecl
)
3451 if (targetm
.attribute_table
)
3455 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3457 const_tree name
= TREE_PURPOSE (a
);
3460 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3461 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3462 return targetm
.function_attribute_inlinable_p (fndecl
);
3469 /* Returns nonzero if FN is a function that does not have any
3470 fundamental inline blocking properties. */
3473 tree_inlinable_function_p (tree fn
)
3475 bool inlinable
= true;
3479 /* If we've already decided this function shouldn't be inlined,
3480 there's no need to check again. */
3481 if (DECL_UNINLINABLE (fn
))
3484 /* We only warn for functions declared `inline' by the user. */
3485 do_warning
= (warn_inline
3486 && DECL_DECLARED_INLINE_P (fn
)
3487 && !DECL_NO_INLINE_WARNING_P (fn
)
3488 && !DECL_IN_SYSTEM_HEADER (fn
));
3490 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3493 && always_inline
== NULL
)
3496 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3497 "is suppressed using -fno-inline", fn
);
3501 else if (!function_attribute_inlinable_p (fn
))
3504 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3505 "uses attributes conflicting with inlining", fn
);
3509 else if (inline_forbidden_p (fn
))
3511 /* See if we should warn about uninlinable functions. Previously,
3512 some of these warnings would be issued while trying to expand
3513 the function inline, but that would cause multiple warnings
3514 about functions that would for example call alloca. But since
3515 this a property of the function, just one warning is enough.
3516 As a bonus we can now give more details about the reason why a
3517 function is not inlinable. */
3519 error (inline_forbidden_reason
, fn
);
3520 else if (do_warning
)
3521 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3526 /* Squirrel away the result so that we don't have to check again. */
3527 DECL_UNINLINABLE (fn
) = !inlinable
;
3532 /* Estimate the cost of a memory move. Use machine dependent
3533 word size and take possible memcpy call into account. */
3536 estimate_move_cost (tree type
)
3540 gcc_assert (!VOID_TYPE_P (type
));
3542 if (TREE_CODE (type
) == VECTOR_TYPE
)
3544 enum machine_mode inner
= TYPE_MODE (TREE_TYPE (type
));
3545 enum machine_mode simd
3546 = targetm
.vectorize
.preferred_simd_mode (inner
);
3547 int simd_mode_size
= GET_MODE_SIZE (simd
);
3548 return ((GET_MODE_SIZE (TYPE_MODE (type
)) + simd_mode_size
- 1)
3552 size
= int_size_in_bytes (type
);
3554 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (!optimize_size
))
3555 /* Cost of a memcpy call, 3 arguments and the call. */
3558 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
3561 /* Returns cost of operation CODE, according to WEIGHTS */
3564 estimate_operator_cost (enum tree_code code
, eni_weights
*weights
,
3565 tree op1 ATTRIBUTE_UNUSED
, tree op2
)
3569 /* These are "free" conversions, or their presumed cost
3570 is folded into other operations. */
3575 case VIEW_CONVERT_EXPR
:
3578 /* Assign cost of 1 to usual operations.
3579 ??? We may consider mapping RTL costs to this. */
3585 case POINTER_PLUS_EXPR
:
3588 case MULT_HIGHPART_EXPR
:
3591 case ADDR_SPACE_CONVERT_EXPR
:
3592 case FIXED_CONVERT_EXPR
:
3593 case FIX_TRUNC_EXPR
:
3605 case VEC_LSHIFT_EXPR
:
3606 case VEC_RSHIFT_EXPR
:
3613 case TRUTH_ANDIF_EXPR
:
3614 case TRUTH_ORIF_EXPR
:
3615 case TRUTH_AND_EXPR
:
3617 case TRUTH_XOR_EXPR
:
3618 case TRUTH_NOT_EXPR
:
3627 case UNORDERED_EXPR
:
3638 case PREDECREMENT_EXPR
:
3639 case PREINCREMENT_EXPR
:
3640 case POSTDECREMENT_EXPR
:
3641 case POSTINCREMENT_EXPR
:
3643 case REALIGN_LOAD_EXPR
:
3645 case REDUC_MAX_EXPR
:
3646 case REDUC_MIN_EXPR
:
3647 case REDUC_PLUS_EXPR
:
3648 case WIDEN_SUM_EXPR
:
3649 case WIDEN_MULT_EXPR
:
3651 case WIDEN_MULT_PLUS_EXPR
:
3652 case WIDEN_MULT_MINUS_EXPR
:
3653 case WIDEN_LSHIFT_EXPR
:
3655 case VEC_WIDEN_MULT_HI_EXPR
:
3656 case VEC_WIDEN_MULT_LO_EXPR
:
3657 case VEC_WIDEN_MULT_EVEN_EXPR
:
3658 case VEC_WIDEN_MULT_ODD_EXPR
:
3659 case VEC_UNPACK_HI_EXPR
:
3660 case VEC_UNPACK_LO_EXPR
:
3661 case VEC_UNPACK_FLOAT_HI_EXPR
:
3662 case VEC_UNPACK_FLOAT_LO_EXPR
:
3663 case VEC_PACK_TRUNC_EXPR
:
3664 case VEC_PACK_SAT_EXPR
:
3665 case VEC_PACK_FIX_TRUNC_EXPR
:
3666 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3667 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3671 /* Few special cases of expensive operations. This is useful
3672 to avoid inlining on functions having too many of these. */
3673 case TRUNC_DIV_EXPR
:
3675 case FLOOR_DIV_EXPR
:
3676 case ROUND_DIV_EXPR
:
3677 case EXACT_DIV_EXPR
:
3678 case TRUNC_MOD_EXPR
:
3680 case FLOOR_MOD_EXPR
:
3681 case ROUND_MOD_EXPR
:
3683 if (TREE_CODE (op2
) != INTEGER_CST
)
3684 return weights
->div_mod_cost
;
3688 /* We expect a copy assignment with no operator. */
3689 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
);
3695 /* Estimate number of instructions that will be created by expanding
3696 the statements in the statement sequence STMTS.
3697 WEIGHTS contains weights attributed to various constructs. */
3700 int estimate_num_insns_seq (gimple_seq stmts
, eni_weights
*weights
)
3703 gimple_stmt_iterator gsi
;
3706 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3707 cost
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
3713 /* Estimate number of instructions that will be created by expanding STMT.
3714 WEIGHTS contains weights attributed to various constructs. */
3717 estimate_num_insns (gimple stmt
, eni_weights
*weights
)
3720 enum gimple_code code
= gimple_code (stmt
);
3727 /* Try to estimate the cost of assignments. We have three cases to
3729 1) Simple assignments to registers;
3730 2) Stores to things that must live in memory. This includes
3731 "normal" stores to scalars, but also assignments of large
3732 structures, or constructors of big arrays;
3734 Let us look at the first two cases, assuming we have "a = b + C":
3735 <GIMPLE_ASSIGN <var_decl "a">
3736 <plus_expr <var_decl "b"> <constant C>>
3737 If "a" is a GIMPLE register, the assignment to it is free on almost
3738 any target, because "a" usually ends up in a real register. Hence
3739 the only cost of this expression comes from the PLUS_EXPR, and we
3740 can ignore the GIMPLE_ASSIGN.
3741 If "a" is not a GIMPLE register, the assignment to "a" will most
3742 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3743 of moving something into "a", which we compute using the function
3744 estimate_move_cost. */
3745 if (gimple_clobber_p (stmt
))
3746 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3748 lhs
= gimple_assign_lhs (stmt
);
3749 rhs
= gimple_assign_rhs1 (stmt
);
3753 /* Account for the cost of moving to / from memory. */
3754 if (gimple_store_p (stmt
))
3755 cost
+= estimate_move_cost (TREE_TYPE (lhs
));
3756 if (gimple_assign_load_p (stmt
))
3757 cost
+= estimate_move_cost (TREE_TYPE (rhs
));
3759 cost
+= estimate_operator_cost (gimple_assign_rhs_code (stmt
), weights
,
3760 gimple_assign_rhs1 (stmt
),
3761 get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
3762 == GIMPLE_BINARY_RHS
3763 ? gimple_assign_rhs2 (stmt
) : NULL
);
3767 cost
= 1 + estimate_operator_cost (gimple_cond_code (stmt
), weights
,
3768 gimple_op (stmt
, 0),
3769 gimple_op (stmt
, 1));
3773 /* Take into account cost of the switch + guess 2 conditional jumps for
3776 TODO: once the switch expansion logic is sufficiently separated, we can
3777 do better job on estimating cost of the switch. */
3778 if (weights
->time_based
)
3779 cost
= floor_log2 (gimple_switch_num_labels (stmt
)) * 2;
3781 cost
= gimple_switch_num_labels (stmt
) * 2;
3786 tree decl
= gimple_call_fndecl (stmt
);
3787 struct cgraph_node
*node
= NULL
;
3789 /* Do not special case builtins where we see the body.
3790 This just confuse inliner. */
3791 if (!decl
|| !(node
= cgraph_get_node (decl
)) || node
->definition
)
3793 /* For buitins that are likely expanded to nothing or
3794 inlined do not account operand costs. */
3795 else if (is_simple_builtin (decl
))
3797 else if (is_inexpensive_builtin (decl
))
3798 return weights
->target_builtin_call_cost
;
3799 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
3801 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3802 specialize the cheap expansion we do here.
3803 ??? This asks for a more general solution. */
3804 switch (DECL_FUNCTION_CODE (decl
))
3809 if (TREE_CODE (gimple_call_arg (stmt
, 1)) == REAL_CST
3810 && REAL_VALUES_EQUAL
3811 (TREE_REAL_CST (gimple_call_arg (stmt
, 1)), dconst2
))
3812 return estimate_operator_cost (MULT_EXPR
, weights
,
3813 gimple_call_arg (stmt
, 0),
3814 gimple_call_arg (stmt
, 0));
3822 cost
= node
? weights
->call_cost
: weights
->indirect_call_cost
;
3823 if (gimple_call_lhs (stmt
))
3824 cost
+= estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt
)));
3825 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
3827 tree arg
= gimple_call_arg (stmt
, i
);
3828 cost
+= estimate_move_cost (TREE_TYPE (arg
));
3834 return weights
->return_cost
;
3840 case GIMPLE_PREDICT
:
3846 int count
= asm_str_count (gimple_asm_string (stmt
));
3847 /* 1000 means infinity. This avoids overflows later
3848 with very long asm statements. */
3855 /* This is either going to be an external function call with one
3856 argument, or two register copy statements plus a goto. */
3859 case GIMPLE_EH_DISPATCH
:
3860 /* ??? This is going to turn into a switch statement. Ideally
3861 we'd have a look at the eh region and estimate the number of
3866 return estimate_num_insns_seq (gimple_bind_body (stmt
), weights
);
3868 case GIMPLE_EH_FILTER
:
3869 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt
), weights
);
3872 return estimate_num_insns_seq (gimple_catch_handler (stmt
), weights
);
3875 return (estimate_num_insns_seq (gimple_try_eval (stmt
), weights
)
3876 + estimate_num_insns_seq (gimple_try_cleanup (stmt
), weights
));
3878 /* OpenMP directives are generally very expensive. */
3880 case GIMPLE_OMP_RETURN
:
3881 case GIMPLE_OMP_SECTIONS_SWITCH
:
3882 case GIMPLE_OMP_ATOMIC_STORE
:
3883 case GIMPLE_OMP_CONTINUE
:
3884 /* ...except these, which are cheap. */
3887 case GIMPLE_OMP_ATOMIC_LOAD
:
3888 return weights
->omp_cost
;
3890 case GIMPLE_OMP_FOR
:
3891 return (weights
->omp_cost
3892 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
)
3893 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt
), weights
));
3895 case GIMPLE_OMP_PARALLEL
:
3896 case GIMPLE_OMP_TASK
:
3897 case GIMPLE_OMP_CRITICAL
:
3898 case GIMPLE_OMP_MASTER
:
3899 case GIMPLE_OMP_TASKGROUP
:
3900 case GIMPLE_OMP_ORDERED
:
3901 case GIMPLE_OMP_SECTION
:
3902 case GIMPLE_OMP_SECTIONS
:
3903 case GIMPLE_OMP_SINGLE
:
3904 case GIMPLE_OMP_TARGET
:
3905 case GIMPLE_OMP_TEAMS
:
3906 return (weights
->omp_cost
3907 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
));
3909 case GIMPLE_TRANSACTION
:
3910 return (weights
->tm_cost
3911 + estimate_num_insns_seq (gimple_transaction_body (stmt
),
3921 /* Estimate number of instructions that will be created by expanding
3922 function FNDECL. WEIGHTS contains weights attributed to various
3926 estimate_num_insns_fn (tree fndecl
, eni_weights
*weights
)
3928 struct function
*my_function
= DECL_STRUCT_FUNCTION (fndecl
);
3929 gimple_stmt_iterator bsi
;
3933 gcc_assert (my_function
&& my_function
->cfg
);
3934 FOR_EACH_BB_FN (bb
, my_function
)
3936 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
3937 n
+= estimate_num_insns (gsi_stmt (bsi
), weights
);
3944 /* Initializes weights used by estimate_num_insns. */
3947 init_inline_once (void)
3949 eni_size_weights
.call_cost
= 1;
3950 eni_size_weights
.indirect_call_cost
= 3;
3951 eni_size_weights
.target_builtin_call_cost
= 1;
3952 eni_size_weights
.div_mod_cost
= 1;
3953 eni_size_weights
.omp_cost
= 40;
3954 eni_size_weights
.tm_cost
= 10;
3955 eni_size_weights
.time_based
= false;
3956 eni_size_weights
.return_cost
= 1;
3958 /* Estimating time for call is difficult, since we have no idea what the
3959 called function does. In the current uses of eni_time_weights,
3960 underestimating the cost does less harm than overestimating it, so
3961 we choose a rather small value here. */
3962 eni_time_weights
.call_cost
= 10;
3963 eni_time_weights
.indirect_call_cost
= 15;
3964 eni_time_weights
.target_builtin_call_cost
= 1;
3965 eni_time_weights
.div_mod_cost
= 10;
3966 eni_time_weights
.omp_cost
= 40;
3967 eni_time_weights
.tm_cost
= 40;
3968 eni_time_weights
.time_based
= true;
3969 eni_time_weights
.return_cost
= 2;
3972 /* Estimate the number of instructions in a gimple_seq. */
3975 count_insns_seq (gimple_seq seq
, eni_weights
*weights
)
3977 gimple_stmt_iterator gsi
;
3979 for (gsi
= gsi_start (seq
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3980 n
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
3986 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3989 prepend_lexical_block (tree current_block
, tree new_block
)
3991 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (current_block
);
3992 BLOCK_SUBBLOCKS (current_block
) = new_block
;
3993 BLOCK_SUPERCONTEXT (new_block
) = current_block
;
3996 /* Add local variables from CALLEE to CALLER. */
3999 add_local_variables (struct function
*callee
, struct function
*caller
,
4005 FOR_EACH_LOCAL_DECL (callee
, ix
, var
)
4006 if (!can_be_nonlocal (var
, id
))
4008 tree new_var
= remap_decl (var
, id
);
4010 /* Remap debug-expressions. */
4011 if (TREE_CODE (new_var
) == VAR_DECL
4012 && DECL_HAS_DEBUG_EXPR_P (var
)
4015 tree tem
= DECL_DEBUG_EXPR (var
);
4016 bool old_regimplify
= id
->regimplify
;
4017 id
->remapping_type_depth
++;
4018 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
4019 id
->remapping_type_depth
--;
4020 id
->regimplify
= old_regimplify
;
4021 SET_DECL_DEBUG_EXPR (new_var
, tem
);
4022 DECL_HAS_DEBUG_EXPR_P (new_var
) = 1;
4024 add_local_decl (caller
, new_var
);
4028 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4031 expand_call_inline (basic_block bb
, gimple stmt
, copy_body_data
*id
)
4035 struct pointer_map_t
*st
, *dst
;
4038 location_t saved_location
;
4039 struct cgraph_edge
*cg_edge
;
4040 cgraph_inline_failed_t reason
;
4041 basic_block return_block
;
4043 gimple_stmt_iterator gsi
, stmt_gsi
;
4044 bool successfully_inlined
= FALSE
;
4045 bool purge_dead_abnormal_edges
;
4047 /* Set input_location here so we get the right instantiation context
4048 if we call instantiate_decl from inlinable_function_p. */
4049 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4050 saved_location
= input_location
;
4051 input_location
= gimple_location (stmt
);
4053 /* From here on, we're only interested in CALL_EXPRs. */
4054 if (gimple_code (stmt
) != GIMPLE_CALL
)
4057 cg_edge
= cgraph_edge (id
->dst_node
, stmt
);
4058 gcc_checking_assert (cg_edge
);
4059 /* First, see if we can figure out what function is being called.
4060 If we cannot, then there is no hope of inlining the function. */
4061 if (cg_edge
->indirect_unknown_callee
)
4063 fn
= cg_edge
->callee
->decl
;
4064 gcc_checking_assert (fn
);
4066 /* If FN is a declaration of a function in a nested scope that was
4067 globally declared inline, we don't set its DECL_INITIAL.
4068 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4069 C++ front-end uses it for cdtors to refer to their internal
4070 declarations, that are not real functions. Fortunately those
4071 don't have trees to be saved, so we can tell by checking their
4073 if (!DECL_INITIAL (fn
)
4074 && DECL_ABSTRACT_ORIGIN (fn
)
4075 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn
)))
4076 fn
= DECL_ABSTRACT_ORIGIN (fn
);
4078 /* Don't try to inline functions that are not well-suited to inlining. */
4079 if (cg_edge
->inline_failed
)
4081 reason
= cg_edge
->inline_failed
;
4082 /* If this call was originally indirect, we do not want to emit any
4083 inlining related warnings or sorry messages because there are no
4084 guarantees regarding those. */
4085 if (cg_edge
->indirect_inlining_edge
)
4088 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
))
4089 /* For extern inline functions that get redefined we always
4090 silently ignored always_inline flag. Better behaviour would
4091 be to be able to keep both bodies and use extern inline body
4092 for inlining, but we can't do that because frontends overwrite
4094 && !cg_edge
->callee
->local
.redefined_extern_inline
4095 /* During early inline pass, report only when optimization is
4097 && (cgraph_global_info_ready
4099 /* PR 20090218-1_0.c. Body can be provided by another module. */
4100 && (reason
!= CIF_BODY_NOT_AVAILABLE
|| !flag_generate_lto
))
4102 error ("inlining failed in call to always_inline %q+F: %s", fn
,
4103 cgraph_inline_failed_string (reason
));
4104 error ("called from here");
4106 else if (warn_inline
4107 && DECL_DECLARED_INLINE_P (fn
)
4108 && !DECL_NO_INLINE_WARNING_P (fn
)
4109 && !DECL_IN_SYSTEM_HEADER (fn
)
4110 && reason
!= CIF_UNSPECIFIED
4111 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn
))
4112 /* Do not warn about not inlined recursive calls. */
4113 && !cgraph_edge_recursive_p (cg_edge
)
4114 /* Avoid warnings during early inline pass. */
4115 && cgraph_global_info_ready
)
4117 warning (OPT_Winline
, "inlining failed in call to %q+F: %s",
4118 fn
, _(cgraph_inline_failed_string (reason
)));
4119 warning (OPT_Winline
, "called from here");
4123 fn
= cg_edge
->callee
->decl
;
4124 cgraph_get_body (cg_edge
->callee
);
4126 #ifdef ENABLE_CHECKING
4127 if (cg_edge
->callee
->decl
!= id
->dst_node
->decl
)
4128 verify_cgraph_node (cg_edge
->callee
);
4131 /* We will be inlining this callee. */
4132 id
->eh_lp_nr
= lookup_stmt_eh_lp (stmt
);
4134 /* Update the callers EH personality. */
4135 if (DECL_FUNCTION_PERSONALITY (cg_edge
->callee
->decl
))
4136 DECL_FUNCTION_PERSONALITY (cg_edge
->caller
->decl
)
4137 = DECL_FUNCTION_PERSONALITY (cg_edge
->callee
->decl
);
4139 /* Split the block holding the GIMPLE_CALL. */
4140 e
= split_block (bb
, stmt
);
4142 return_block
= e
->dest
;
4145 /* split_block splits after the statement; work around this by
4146 moving the call into the second block manually. Not pretty,
4147 but seems easier than doing the CFG manipulation by hand
4148 when the GIMPLE_CALL is in the last statement of BB. */
4149 stmt_gsi
= gsi_last_bb (bb
);
4150 gsi_remove (&stmt_gsi
, false);
4152 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4153 been the source of abnormal edges. In this case, schedule
4154 the removal of dead abnormal edges. */
4155 gsi
= gsi_start_bb (return_block
);
4156 if (gsi_end_p (gsi
))
4158 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
4159 purge_dead_abnormal_edges
= true;
4163 gsi_insert_before (&gsi
, stmt
, GSI_NEW_STMT
);
4164 purge_dead_abnormal_edges
= false;
4167 stmt_gsi
= gsi_start_bb (return_block
);
4169 /* Build a block containing code to initialize the arguments, the
4170 actual inline expansion of the body, and a label for the return
4171 statements within the function to jump to. The type of the
4172 statement expression is the return type of the function call.
4173 ??? If the call does not have an associated block then we will
4174 remap all callee blocks to NULL, effectively dropping most of
4175 its debug information. This should only happen for calls to
4176 artificial decls inserted by the compiler itself. We need to
4177 either link the inlined blocks into the caller block tree or
4178 not refer to them in any way to not break GC for locations. */
4179 if (gimple_block (stmt
))
4181 id
->block
= make_node (BLOCK
);
4182 BLOCK_ABSTRACT_ORIGIN (id
->block
) = fn
;
4183 BLOCK_SOURCE_LOCATION (id
->block
) = LOCATION_LOCUS (input_location
);
4184 prepend_lexical_block (gimple_block (stmt
), id
->block
);
4187 /* Local declarations will be replaced by their equivalents in this
4190 id
->decl_map
= pointer_map_create ();
4191 dst
= id
->debug_map
;
4192 id
->debug_map
= NULL
;
4194 /* Record the function we are about to inline. */
4196 id
->src_node
= cg_edge
->callee
;
4197 id
->src_cfun
= DECL_STRUCT_FUNCTION (fn
);
4198 id
->gimple_call
= stmt
;
4200 gcc_assert (!id
->src_cfun
->after_inlining
);
4203 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn
)))
4205 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4206 gsi_insert_after (&si
, gimple_build_predict (PRED_COLD_FUNCTION
,
4210 initialize_inlined_parameters (id
, stmt
, fn
, bb
);
4212 if (DECL_INITIAL (fn
))
4214 if (gimple_block (stmt
))
4218 prepend_lexical_block (id
->block
,
4219 remap_blocks (DECL_INITIAL (fn
), id
));
4220 gcc_checking_assert (BLOCK_SUBBLOCKS (id
->block
)
4221 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id
->block
))
4223 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4224 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4225 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4226 under it. The parameters can be then evaluated in the debugger,
4227 but don't show in backtraces. */
4228 for (var
= &BLOCK_VARS (BLOCK_SUBBLOCKS (id
->block
)); *var
; )
4229 if (TREE_CODE (DECL_ORIGIN (*var
)) == PARM_DECL
)
4232 *var
= TREE_CHAIN (v
);
4233 TREE_CHAIN (v
) = BLOCK_VARS (id
->block
);
4234 BLOCK_VARS (id
->block
) = v
;
4237 var
= &TREE_CHAIN (*var
);
4240 remap_blocks_to_null (DECL_INITIAL (fn
), id
);
4243 /* Return statements in the function body will be replaced by jumps
4244 to the RET_LABEL. */
4245 gcc_assert (DECL_INITIAL (fn
));
4246 gcc_assert (TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
);
4248 /* Find the LHS to which the result of this call is assigned. */
4250 if (gimple_call_lhs (stmt
))
4252 modify_dest
= gimple_call_lhs (stmt
);
4254 /* The function which we are inlining might not return a value,
4255 in which case we should issue a warning that the function
4256 does not return a value. In that case the optimizers will
4257 see that the variable to which the value is assigned was not
4258 initialized. We do not want to issue a warning about that
4259 uninitialized variable. */
4260 if (DECL_P (modify_dest
))
4261 TREE_NO_WARNING (modify_dest
) = 1;
4263 if (gimple_call_return_slot_opt_p (stmt
))
4265 return_slot
= modify_dest
;
4272 /* If we are inlining a call to the C++ operator new, we don't want
4273 to use type based alias analysis on the return value. Otherwise
4274 we may get confused if the compiler sees that the inlined new
4275 function returns a pointer which was just deleted. See bug
4277 if (DECL_IS_OPERATOR_NEW (fn
))
4283 /* Declare the return variable for the function. */
4284 use_retvar
= declare_return_variable (id
, return_slot
, modify_dest
, bb
);
4286 /* Add local vars in this inlined callee to caller. */
4287 add_local_variables (id
->src_cfun
, cfun
, id
);
4289 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4291 fprintf (dump_file
, "Inlining ");
4292 print_generic_expr (dump_file
, id
->src_fn
, 0);
4293 fprintf (dump_file
, " to ");
4294 print_generic_expr (dump_file
, id
->dst_fn
, 0);
4295 fprintf (dump_file
, " with frequency %i\n", cg_edge
->frequency
);
4298 /* This is it. Duplicate the callee body. Assume callee is
4299 pre-gimplified. Note that we must not alter the caller
4300 function in any way before this point, as this CALL_EXPR may be
4301 a self-referential call; if we're calling ourselves, we need to
4302 duplicate our body before altering anything. */
4303 copy_body (id
, bb
->count
,
4304 GCOV_COMPUTE_SCALE (cg_edge
->frequency
, CGRAPH_FREQ_BASE
),
4305 bb
, return_block
, NULL
);
4307 /* Reset the escaped solution. */
4308 if (cfun
->gimple_df
)
4309 pt_solution_reset (&cfun
->gimple_df
->escaped
);
4314 pointer_map_destroy (id
->debug_map
);
4315 id
->debug_map
= dst
;
4317 pointer_map_destroy (id
->decl_map
);
4320 /* Unlink the calls virtual operands before replacing it. */
4321 unlink_stmt_vdef (stmt
);
4323 /* If the inlined function returns a result that we care about,
4324 substitute the GIMPLE_CALL with an assignment of the return
4325 variable to the LHS of the call. That is, if STMT was
4326 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4327 if (use_retvar
&& gimple_call_lhs (stmt
))
4329 gimple old_stmt
= stmt
;
4330 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), use_retvar
);
4331 gsi_replace (&stmt_gsi
, stmt
, false);
4332 maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
);
4336 /* Handle the case of inlining a function with no return
4337 statement, which causes the return value to become undefined. */
4338 if (gimple_call_lhs (stmt
)
4339 && TREE_CODE (gimple_call_lhs (stmt
)) == SSA_NAME
)
4341 tree name
= gimple_call_lhs (stmt
);
4342 tree var
= SSA_NAME_VAR (name
);
4343 tree def
= ssa_default_def (cfun
, var
);
4347 /* If the variable is used undefined, make this name
4348 undefined via a move. */
4349 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), def
);
4350 gsi_replace (&stmt_gsi
, stmt
, true);
4354 /* Otherwise make this variable undefined. */
4355 gsi_remove (&stmt_gsi
, true);
4356 set_ssa_default_def (cfun
, var
, name
);
4357 SSA_NAME_DEF_STMT (name
) = gimple_build_nop ();
4361 gsi_remove (&stmt_gsi
, true);
4364 if (purge_dead_abnormal_edges
)
4366 gimple_purge_dead_eh_edges (return_block
);
4367 gimple_purge_dead_abnormal_call_edges (return_block
);
4370 /* If the value of the new expression is ignored, that's OK. We
4371 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4372 the equivalent inlined version either. */
4373 if (is_gimple_assign (stmt
))
4375 gcc_assert (gimple_assign_single_p (stmt
)
4376 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)));
4377 TREE_USED (gimple_assign_rhs1 (stmt
)) = 1;
4380 /* Output the inlining info for this abstract function, since it has been
4381 inlined. If we don't do this now, we can lose the information about the
4382 variables in the function when the blocks get blown away as soon as we
4383 remove the cgraph node. */
4384 if (gimple_block (stmt
))
4385 (*debug_hooks
->outlining_inline_function
) (cg_edge
->callee
->decl
);
4387 /* Update callgraph if needed. */
4388 cgraph_remove_node (cg_edge
->callee
);
4390 id
->block
= NULL_TREE
;
4391 successfully_inlined
= TRUE
;
4394 input_location
= saved_location
;
4395 return successfully_inlined
;
4398 /* Expand call statements reachable from STMT_P.
4399 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4400 in a MODIFY_EXPR. */
4403 gimple_expand_calls_inline (basic_block bb
, copy_body_data
*id
)
4405 gimple_stmt_iterator gsi
;
4407 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4409 gimple stmt
= gsi_stmt (gsi
);
4411 if (is_gimple_call (stmt
)
4412 && expand_call_inline (bb
, stmt
, id
))
4420 /* Walk all basic blocks created after FIRST and try to fold every statement
4421 in the STATEMENTS pointer set. */
4424 fold_marked_statements (int first
, struct pointer_set_t
*statements
)
4426 for (; first
< n_basic_blocks
; first
++)
4427 if (BASIC_BLOCK (first
))
4429 gimple_stmt_iterator gsi
;
4431 for (gsi
= gsi_start_bb (BASIC_BLOCK (first
));
4434 if (pointer_set_contains (statements
, gsi_stmt (gsi
)))
4436 gimple old_stmt
= gsi_stmt (gsi
);
4437 tree old_decl
= is_gimple_call (old_stmt
) ? gimple_call_fndecl (old_stmt
) : 0;
4439 if (old_decl
&& DECL_BUILT_IN (old_decl
))
4441 /* Folding builtins can create multiple instructions,
4442 we need to look at all of them. */
4443 gimple_stmt_iterator i2
= gsi
;
4445 if (fold_stmt (&gsi
))
4448 /* If a builtin at the end of a bb folded into nothing,
4449 the following loop won't work. */
4450 if (gsi_end_p (gsi
))
4452 cgraph_update_edges_for_call_stmt (old_stmt
,
4457 i2
= gsi_start_bb (BASIC_BLOCK (first
));
4462 new_stmt
= gsi_stmt (i2
);
4463 update_stmt (new_stmt
);
4464 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4467 if (new_stmt
== gsi_stmt (gsi
))
4469 /* It is okay to check only for the very last
4470 of these statements. If it is a throwing
4471 statement nothing will change. If it isn't
4472 this can remove EH edges. If that weren't
4473 correct then because some intermediate stmts
4474 throw, but not the last one. That would mean
4475 we'd have to split the block, which we can't
4476 here and we'd loose anyway. And as builtins
4477 probably never throw, this all
4479 if (maybe_clean_or_replace_eh_stmt (old_stmt
,
4481 gimple_purge_dead_eh_edges (BASIC_BLOCK (first
));
4488 else if (fold_stmt (&gsi
))
4490 /* Re-read the statement from GSI as fold_stmt() may
4492 gimple new_stmt
= gsi_stmt (gsi
);
4493 update_stmt (new_stmt
);
4495 if (is_gimple_call (old_stmt
)
4496 || is_gimple_call (new_stmt
))
4497 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4500 if (maybe_clean_or_replace_eh_stmt (old_stmt
, new_stmt
))
4501 gimple_purge_dead_eh_edges (BASIC_BLOCK (first
));
4507 /* Return true if BB has at least one abnormal outgoing edge. */
4510 has_abnormal_outgoing_edge_p (basic_block bb
)
4515 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4516 if (e
->flags
& EDGE_ABNORMAL
)
4522 /* Expand calls to inline functions in the body of FN. */
4525 optimize_inline_calls (tree fn
)
4529 int last
= n_basic_blocks
;
4530 struct gimplify_ctx gctx
;
4531 bool inlined_p
= false;
4534 memset (&id
, 0, sizeof (id
));
4536 id
.src_node
= id
.dst_node
= cgraph_get_node (fn
);
4537 gcc_assert (id
.dst_node
->definition
);
4539 /* Or any functions that aren't finished yet. */
4540 if (current_function_decl
)
4541 id
.dst_fn
= current_function_decl
;
4543 id
.copy_decl
= copy_decl_maybe_to_var
;
4544 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
4545 id
.transform_new_cfg
= false;
4546 id
.transform_return_to_modify
= true;
4547 id
.transform_parameter
= true;
4548 id
.transform_lang_insert_block
= NULL
;
4549 id
.statements_to_fold
= pointer_set_create ();
4551 push_gimplify_context (&gctx
);
4553 /* We make no attempts to keep dominance info up-to-date. */
4554 free_dominance_info (CDI_DOMINATORS
);
4555 free_dominance_info (CDI_POST_DOMINATORS
);
4557 /* Register specific gimple functions. */
4558 gimple_register_cfg_hooks ();
4560 /* Reach the trees by walking over the CFG, and note the
4561 enclosing basic-blocks in the call edges. */
4562 /* We walk the blocks going forward, because inlined function bodies
4563 will split id->current_basic_block, and the new blocks will
4564 follow it; we'll trudge through them, processing their CALL_EXPRs
4567 inlined_p
|= gimple_expand_calls_inline (bb
, &id
);
4569 pop_gimplify_context (NULL
);
4571 #ifdef ENABLE_CHECKING
4573 struct cgraph_edge
*e
;
4575 verify_cgraph_node (id
.dst_node
);
4577 /* Double check that we inlined everything we are supposed to inline. */
4578 for (e
= id
.dst_node
->callees
; e
; e
= e
->next_callee
)
4579 gcc_assert (e
->inline_failed
);
4583 /* Fold queued statements. */
4584 fold_marked_statements (last
, id
.statements_to_fold
);
4585 pointer_set_destroy (id
.statements_to_fold
);
4587 gcc_assert (!id
.debug_stmts
.exists ());
4589 /* If we didn't inline into the function there is nothing to do. */
4593 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4596 delete_unreachable_blocks_update_callgraph (&id
);
4597 #ifdef ENABLE_CHECKING
4598 verify_cgraph_node (id
.dst_node
);
4601 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4602 not possible yet - the IPA passes might make various functions to not
4603 throw and they don't care to proactively update local EH info. This is
4604 done later in fixup_cfg pass that also execute the verification. */
4605 return (TODO_update_ssa
4607 | (gimple_in_ssa_p (cfun
) ? TODO_remove_unused_locals
: 0)
4608 | (gimple_in_ssa_p (cfun
) ? TODO_update_address_taken
: 0)
4609 | (profile_status
!= PROFILE_ABSENT
? TODO_rebuild_frequencies
: 0));
4612 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4615 copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
4617 enum tree_code code
= TREE_CODE (*tp
);
4618 enum tree_code_class cl
= TREE_CODE_CLASS (code
);
4620 /* We make copies of most nodes. */
4621 if (IS_EXPR_CODE_CLASS (cl
)
4622 || code
== TREE_LIST
4624 || code
== TYPE_DECL
4625 || code
== OMP_CLAUSE
)
4627 /* Because the chain gets clobbered when we make a copy, we save it
4629 tree chain
= NULL_TREE
, new_tree
;
4631 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
4632 chain
= TREE_CHAIN (*tp
);
4634 /* Copy the node. */
4635 new_tree
= copy_node (*tp
);
4639 /* Now, restore the chain, if appropriate. That will cause
4640 walk_tree to walk into the chain as well. */
4641 if (code
== PARM_DECL
4642 || code
== TREE_LIST
4643 || code
== OMP_CLAUSE
)
4644 TREE_CHAIN (*tp
) = chain
;
4646 /* For now, we don't update BLOCKs when we make copies. So, we
4647 have to nullify all BIND_EXPRs. */
4648 if (TREE_CODE (*tp
) == BIND_EXPR
)
4649 BIND_EXPR_BLOCK (*tp
) = NULL_TREE
;
4651 else if (code
== CONSTRUCTOR
)
4653 /* CONSTRUCTOR nodes need special handling because
4654 we need to duplicate the vector of elements. */
4657 new_tree
= copy_node (*tp
);
4658 CONSTRUCTOR_ELTS (new_tree
) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp
));
4661 else if (code
== STATEMENT_LIST
)
4662 /* We used to just abort on STATEMENT_LIST, but we can run into them
4663 with statement-expressions (c++/40975). */
4664 copy_statement_list (tp
);
4665 else if (TREE_CODE_CLASS (code
) == tcc_type
)
4667 else if (TREE_CODE_CLASS (code
) == tcc_declaration
)
4669 else if (TREE_CODE_CLASS (code
) == tcc_constant
)
4674 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4675 information indicating to what new SAVE_EXPR this one should be mapped,
4676 use that one. Otherwise, create a new node and enter it in ST. FN is
4677 the function into which the copy will be placed. */
4680 remap_save_expr (tree
*tp
, void *st_
, int *walk_subtrees
)
4682 struct pointer_map_t
*st
= (struct pointer_map_t
*) st_
;
4686 /* See if we already encountered this SAVE_EXPR. */
4687 n
= (tree
*) pointer_map_contains (st
, *tp
);
4689 /* If we didn't already remap this SAVE_EXPR, do so now. */
4692 t
= copy_node (*tp
);
4694 /* Remember this SAVE_EXPR. */
4695 *pointer_map_insert (st
, *tp
) = t
;
4696 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4697 *pointer_map_insert (st
, t
) = t
;
4701 /* We've already walked into this SAVE_EXPR; don't do it again. */
4706 /* Replace this SAVE_EXPR with the copy. */
4710 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4711 label, copies the declaration and enters it in the splay_tree in DATA (which
4712 is really a 'copy_body_data *'. */
4715 mark_local_labels_stmt (gimple_stmt_iterator
*gsip
,
4716 bool *handled_ops_p ATTRIBUTE_UNUSED
,
4717 struct walk_stmt_info
*wi
)
4719 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
4720 gimple stmt
= gsi_stmt (*gsip
);
4722 if (gimple_code (stmt
) == GIMPLE_LABEL
)
4724 tree decl
= gimple_label_label (stmt
);
4726 /* Copy the decl and remember the copy. */
4727 insert_decl_map (id
, decl
, id
->copy_decl (decl
, id
));
4734 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4735 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4736 remaps all local declarations to appropriate replacements in gimple
4740 replace_locals_op (tree
*tp
, int *walk_subtrees
, void *data
)
4742 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
4743 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
4744 struct pointer_map_t
*st
= id
->decl_map
;
4748 /* Only a local declaration (variable or label). */
4749 if ((TREE_CODE (expr
) == VAR_DECL
4750 && !TREE_STATIC (expr
))
4751 || TREE_CODE (expr
) == LABEL_DECL
)
4753 /* Lookup the declaration. */
4754 n
= (tree
*) pointer_map_contains (st
, expr
);
4756 /* If it's there, remap it. */
4761 else if (TREE_CODE (expr
) == STATEMENT_LIST
4762 || TREE_CODE (expr
) == BIND_EXPR
4763 || TREE_CODE (expr
) == SAVE_EXPR
)
4765 else if (TREE_CODE (expr
) == TARGET_EXPR
)
4767 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4768 It's OK for this to happen if it was part of a subtree that
4769 isn't immediately expanded, such as operand 2 of another
4771 if (!TREE_OPERAND (expr
, 1))
4773 TREE_OPERAND (expr
, 1) = TREE_OPERAND (expr
, 3);
4774 TREE_OPERAND (expr
, 3) = NULL_TREE
;
4778 /* Keep iterating. */
4783 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
4784 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
4785 remaps all local declarations to appropriate replacements in gimple
4789 replace_locals_stmt (gimple_stmt_iterator
*gsip
,
4790 bool *handled_ops_p ATTRIBUTE_UNUSED
,
4791 struct walk_stmt_info
*wi
)
4793 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
4794 gimple stmt
= gsi_stmt (*gsip
);
4796 if (gimple_code (stmt
) == GIMPLE_BIND
)
4798 tree block
= gimple_bind_block (stmt
);
4802 remap_block (&block
, id
);
4803 gimple_bind_set_block (stmt
, block
);
4806 /* This will remap a lot of the same decls again, but this should be
4808 if (gimple_bind_vars (stmt
))
4809 gimple_bind_set_vars (stmt
, remap_decls (gimple_bind_vars (stmt
),
4813 /* Keep iterating. */
4818 /* Copies everything in SEQ and replaces variables and labels local to
4819 current_function_decl. */
4822 copy_gimple_seq_and_replace_locals (gimple_seq seq
)
4825 struct walk_stmt_info wi
;
4826 struct pointer_set_t
*visited
;
4829 /* There's nothing to do for NULL_TREE. */
4834 memset (&id
, 0, sizeof (id
));
4835 id
.src_fn
= current_function_decl
;
4836 id
.dst_fn
= current_function_decl
;
4837 id
.decl_map
= pointer_map_create ();
4838 id
.debug_map
= NULL
;
4840 id
.copy_decl
= copy_decl_no_change
;
4841 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
4842 id
.transform_new_cfg
= false;
4843 id
.transform_return_to_modify
= false;
4844 id
.transform_parameter
= false;
4845 id
.transform_lang_insert_block
= NULL
;
4847 /* Walk the tree once to find local labels. */
4848 memset (&wi
, 0, sizeof (wi
));
4849 visited
= pointer_set_create ();
4852 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, &wi
);
4853 pointer_set_destroy (visited
);
4855 copy
= gimple_seq_copy (seq
);
4857 /* Walk the copy, remapping decls. */
4858 memset (&wi
, 0, sizeof (wi
));
4860 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, &wi
);
4863 pointer_map_destroy (id
.decl_map
);
4865 pointer_map_destroy (id
.debug_map
);
4871 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4874 debug_find_tree_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
, void *data
)
4883 debug_find_tree (tree top
, tree search
)
4885 return walk_tree_without_duplicates (&top
, debug_find_tree_1
, search
) != 0;
4889 /* Declare the variables created by the inliner. Add all the variables in
4890 VARS to BIND_EXPR. */
4893 declare_inline_vars (tree block
, tree vars
)
4896 for (t
= vars
; t
; t
= DECL_CHAIN (t
))
4898 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
4899 gcc_assert (!TREE_STATIC (t
) && !TREE_ASM_WRITTEN (t
));
4900 add_local_decl (cfun
, t
);
4904 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), vars
);
4907 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
4908 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
4909 VAR_DECL translation. */
4912 copy_decl_for_dup_finish (copy_body_data
*id
, tree decl
, tree copy
)
4914 /* Don't generate debug information for the copy if we wouldn't have
4915 generated it for the copy either. */
4916 DECL_ARTIFICIAL (copy
) = DECL_ARTIFICIAL (decl
);
4917 DECL_IGNORED_P (copy
) = DECL_IGNORED_P (decl
);
4919 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4920 declaration inspired this copy. */
4921 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
4923 /* The new variable/label has no RTL, yet. */
4924 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy
), TS_DECL_WRTL
)
4925 && !TREE_STATIC (copy
) && !DECL_EXTERNAL (copy
))
4926 SET_DECL_RTL (copy
, 0);
4928 /* These args would always appear unused, if not for this. */
4929 TREE_USED (copy
) = 1;
4931 /* Set the context for the new declaration. */
4932 if (!DECL_CONTEXT (decl
))
4933 /* Globals stay global. */
4935 else if (DECL_CONTEXT (decl
) != id
->src_fn
)
4936 /* Things that weren't in the scope of the function we're inlining
4937 from aren't in the scope we're inlining to, either. */
4939 else if (TREE_STATIC (decl
))
4940 /* Function-scoped static variables should stay in the original
4944 /* Ordinary automatic local variables are now in the scope of the
4946 DECL_CONTEXT (copy
) = id
->dst_fn
;
4952 copy_decl_to_var (tree decl
, copy_body_data
*id
)
4956 gcc_assert (TREE_CODE (decl
) == PARM_DECL
4957 || TREE_CODE (decl
) == RESULT_DECL
);
4959 type
= TREE_TYPE (decl
);
4961 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
4962 VAR_DECL
, DECL_NAME (decl
), type
);
4963 if (DECL_PT_UID_SET_P (decl
))
4964 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
4965 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
4966 TREE_READONLY (copy
) = TREE_READONLY (decl
);
4967 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
4968 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
4970 return copy_decl_for_dup_finish (id
, decl
, copy
);
4973 /* Like copy_decl_to_var, but create a return slot object instead of a
4974 pointer variable for return by invisible reference. */
4977 copy_result_decl_to_var (tree decl
, copy_body_data
*id
)
4981 gcc_assert (TREE_CODE (decl
) == PARM_DECL
4982 || TREE_CODE (decl
) == RESULT_DECL
);
4984 type
= TREE_TYPE (decl
);
4985 if (DECL_BY_REFERENCE (decl
))
4986 type
= TREE_TYPE (type
);
4988 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
4989 VAR_DECL
, DECL_NAME (decl
), type
);
4990 if (DECL_PT_UID_SET_P (decl
))
4991 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
4992 TREE_READONLY (copy
) = TREE_READONLY (decl
);
4993 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
4994 if (!DECL_BY_REFERENCE (decl
))
4996 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
4997 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5000 return copy_decl_for_dup_finish (id
, decl
, copy
);
5004 copy_decl_no_change (tree decl
, copy_body_data
*id
)
5008 copy
= copy_node (decl
);
5010 /* The COPY is not abstract; it will be generated in DST_FN. */
5011 DECL_ABSTRACT (copy
) = 0;
5012 lang_hooks
.dup_lang_specific_decl (copy
);
5014 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5015 been taken; it's for internal bookkeeping in expand_goto_internal. */
5016 if (TREE_CODE (copy
) == LABEL_DECL
)
5018 TREE_ADDRESSABLE (copy
) = 0;
5019 LABEL_DECL_UID (copy
) = -1;
5022 return copy_decl_for_dup_finish (id
, decl
, copy
);
5026 copy_decl_maybe_to_var (tree decl
, copy_body_data
*id
)
5028 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
5029 return copy_decl_to_var (decl
, id
);
5031 return copy_decl_no_change (decl
, id
);
5034 /* Return a copy of the function's argument tree. */
5036 copy_arguments_for_versioning (tree orig_parm
, copy_body_data
* id
,
5037 bitmap args_to_skip
, tree
*vars
)
5040 tree new_parm
= NULL
;
5045 for (arg
= orig_parm
; arg
; arg
= DECL_CHAIN (arg
), i
++)
5046 if (!args_to_skip
|| !bitmap_bit_p (args_to_skip
, i
))
5048 tree new_tree
= remap_decl (arg
, id
);
5049 if (TREE_CODE (new_tree
) != PARM_DECL
)
5050 new_tree
= id
->copy_decl (arg
, id
);
5051 lang_hooks
.dup_lang_specific_decl (new_tree
);
5053 parg
= &DECL_CHAIN (new_tree
);
5055 else if (!pointer_map_contains (id
->decl_map
, arg
))
5057 /* Make an equivalent VAR_DECL. If the argument was used
5058 as temporary variable later in function, the uses will be
5059 replaced by local variable. */
5060 tree var
= copy_decl_to_var (arg
, id
);
5061 insert_decl_map (id
, arg
, var
);
5062 /* Declare this new variable. */
5063 DECL_CHAIN (var
) = *vars
;
5069 /* Return a copy of the function's static chain. */
5071 copy_static_chain (tree static_chain
, copy_body_data
* id
)
5073 tree
*chain_copy
, *pvar
;
5075 chain_copy
= &static_chain
;
5076 for (pvar
= chain_copy
; *pvar
; pvar
= &DECL_CHAIN (*pvar
))
5078 tree new_tree
= remap_decl (*pvar
, id
);
5079 lang_hooks
.dup_lang_specific_decl (new_tree
);
5080 DECL_CHAIN (new_tree
) = DECL_CHAIN (*pvar
);
5083 return static_chain
;
5086 /* Return true if the function is allowed to be versioned.
5087 This is a guard for the versioning functionality. */
5090 tree_versionable_function_p (tree fndecl
)
5092 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl
))
5093 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl
), fndecl
) == NULL
);
5096 /* Delete all unreachable basic blocks and update callgraph.
5097 Doing so is somewhat nontrivial because we need to update all clones and
5098 remove inline function that become unreachable. */
5101 delete_unreachable_blocks_update_callgraph (copy_body_data
*id
)
5103 bool changed
= false;
5104 basic_block b
, next_bb
;
5106 find_unreachable_blocks ();
5108 /* Delete all unreachable basic blocks. */
5110 for (b
= ENTRY_BLOCK_PTR
->next_bb
; b
!= EXIT_BLOCK_PTR
; b
= next_bb
)
5112 next_bb
= b
->next_bb
;
5114 if (!(b
->flags
& BB_REACHABLE
))
5116 gimple_stmt_iterator bsi
;
5118 for (bsi
= gsi_start_bb (b
); !gsi_end_p (bsi
); gsi_next (&bsi
))
5120 struct cgraph_edge
*e
;
5121 struct cgraph_node
*node
;
5123 ipa_remove_stmt_references (id
->dst_node
, gsi_stmt (bsi
));
5125 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5126 &&(e
= cgraph_edge (id
->dst_node
, gsi_stmt (bsi
))) != NULL
)
5128 if (!e
->inline_failed
)
5129 cgraph_remove_node_and_inline_clones (e
->callee
, id
->dst_node
);
5131 cgraph_remove_edge (e
);
5133 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
5134 && id
->dst_node
->clones
)
5135 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5137 ipa_remove_stmt_references (node
, gsi_stmt (bsi
));
5138 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5139 && (e
= cgraph_edge (node
, gsi_stmt (bsi
))) != NULL
)
5141 if (!e
->inline_failed
)
5142 cgraph_remove_node_and_inline_clones (e
->callee
, id
->dst_node
);
5144 cgraph_remove_edge (e
);
5148 node
= node
->clones
;
5149 else if (node
->next_sibling_clone
)
5150 node
= node
->next_sibling_clone
;
5153 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5154 node
= node
->clone_of
;
5155 if (node
!= id
->dst_node
)
5156 node
= node
->next_sibling_clone
;
5160 delete_basic_block (b
);
5168 /* Update clone info after duplication. */
5171 update_clone_info (copy_body_data
* id
)
5173 struct cgraph_node
*node
;
5174 if (!id
->dst_node
->clones
)
5176 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5178 /* First update replace maps to match the new body. */
5179 if (node
->clone
.tree_map
)
5182 for (i
= 0; i
< vec_safe_length (node
->clone
.tree_map
); i
++)
5184 struct ipa_replace_map
*replace_info
;
5185 replace_info
= (*node
->clone
.tree_map
)[i
];
5186 walk_tree (&replace_info
->old_tree
, copy_tree_body_r
, id
, NULL
);
5187 walk_tree (&replace_info
->new_tree
, copy_tree_body_r
, id
, NULL
);
5191 node
= node
->clones
;
5192 else if (node
->next_sibling_clone
)
5193 node
= node
->next_sibling_clone
;
5196 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5197 node
= node
->clone_of
;
5198 if (node
!= id
->dst_node
)
5199 node
= node
->next_sibling_clone
;
5204 /* Create a copy of a function's tree.
5205 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5206 of the original function and the new copied function
5207 respectively. In case we want to replace a DECL
5208 tree with another tree while duplicating the function's
5209 body, TREE_MAP represents the mapping between these
5210 trees. If UPDATE_CLONES is set, the call_stmt fields
5211 of edges of clones of the function will be updated.
5213 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5215 If SKIP_RETURN is true, the new version will return void.
5216 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5217 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5220 tree_function_versioning (tree old_decl
, tree new_decl
,
5221 vec
<ipa_replace_map_p
, va_gc
> *tree_map
,
5222 bool update_clones
, bitmap args_to_skip
,
5223 bool skip_return
, bitmap blocks_to_copy
,
5224 basic_block new_entry
)
5226 struct cgraph_node
*old_version_node
;
5227 struct cgraph_node
*new_version_node
;
5231 struct ipa_replace_map
*replace_info
;
5232 basic_block old_entry_block
, bb
;
5233 stack_vec
<gimple
, 10> init_stmts
;
5234 tree vars
= NULL_TREE
;
5236 gcc_assert (TREE_CODE (old_decl
) == FUNCTION_DECL
5237 && TREE_CODE (new_decl
) == FUNCTION_DECL
);
5238 DECL_POSSIBLY_INLINED (old_decl
) = 1;
5240 old_version_node
= cgraph_get_node (old_decl
);
5241 gcc_checking_assert (old_version_node
);
5242 new_version_node
= cgraph_get_node (new_decl
);
5243 gcc_checking_assert (new_version_node
);
5245 /* Copy over debug args. */
5246 if (DECL_HAS_DEBUG_ARGS_P (old_decl
))
5248 vec
<tree
, va_gc
> **new_debug_args
, **old_debug_args
;
5249 gcc_checking_assert (decl_debug_args_lookup (new_decl
) == NULL
);
5250 DECL_HAS_DEBUG_ARGS_P (new_decl
) = 0;
5251 old_debug_args
= decl_debug_args_lookup (old_decl
);
5254 new_debug_args
= decl_debug_args_insert (new_decl
);
5255 *new_debug_args
= vec_safe_copy (*old_debug_args
);
5259 /* Output the inlining info for this abstract function, since it has been
5260 inlined. If we don't do this now, we can lose the information about the
5261 variables in the function when the blocks get blown away as soon as we
5262 remove the cgraph node. */
5263 (*debug_hooks
->outlining_inline_function
) (old_decl
);
5265 DECL_ARTIFICIAL (new_decl
) = 1;
5266 DECL_ABSTRACT_ORIGIN (new_decl
) = DECL_ORIGIN (old_decl
);
5267 if (DECL_ORIGIN (old_decl
) == old_decl
)
5268 old_version_node
->used_as_abstract_origin
= true;
5269 DECL_FUNCTION_PERSONALITY (new_decl
) = DECL_FUNCTION_PERSONALITY (old_decl
);
5271 /* Prepare the data structures for the tree copy. */
5272 memset (&id
, 0, sizeof (id
));
5274 /* Generate a new name for the new version. */
5275 id
.statements_to_fold
= pointer_set_create ();
5277 id
.decl_map
= pointer_map_create ();
5278 id
.debug_map
= NULL
;
5279 id
.src_fn
= old_decl
;
5280 id
.dst_fn
= new_decl
;
5281 id
.src_node
= old_version_node
;
5282 id
.dst_node
= new_version_node
;
5283 id
.src_cfun
= DECL_STRUCT_FUNCTION (old_decl
);
5284 id
.blocks_to_copy
= blocks_to_copy
;
5285 if (id
.src_node
->ipa_transforms_to_apply
.exists ())
5287 vec
<ipa_opt_pass
> old_transforms_to_apply
5288 = id
.dst_node
->ipa_transforms_to_apply
;
5291 id
.dst_node
->ipa_transforms_to_apply
5292 = id
.src_node
->ipa_transforms_to_apply
.copy ();
5293 for (i
= 0; i
< old_transforms_to_apply
.length (); i
++)
5294 id
.dst_node
->ipa_transforms_to_apply
.safe_push (old_transforms_to_apply
[i
]);
5295 old_transforms_to_apply
.release ();
5298 id
.copy_decl
= copy_decl_no_change
;
5299 id
.transform_call_graph_edges
5300 = update_clones
? CB_CGE_MOVE_CLONES
: CB_CGE_MOVE
;
5301 id
.transform_new_cfg
= true;
5302 id
.transform_return_to_modify
= false;
5303 id
.transform_parameter
= false;
5304 id
.transform_lang_insert_block
= NULL
;
5306 old_entry_block
= ENTRY_BLOCK_PTR_FOR_FUNCTION
5307 (DECL_STRUCT_FUNCTION (old_decl
));
5308 DECL_RESULT (new_decl
) = DECL_RESULT (old_decl
);
5309 DECL_ARGUMENTS (new_decl
) = DECL_ARGUMENTS (old_decl
);
5310 initialize_cfun (new_decl
, old_decl
,
5311 old_entry_block
->count
);
5312 DECL_STRUCT_FUNCTION (new_decl
)->gimple_df
->ipa_pta
5313 = id
.src_cfun
->gimple_df
->ipa_pta
;
5315 /* Copy the function's static chain. */
5316 p
= DECL_STRUCT_FUNCTION (old_decl
)->static_chain_decl
;
5318 DECL_STRUCT_FUNCTION (new_decl
)->static_chain_decl
=
5319 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl
)->static_chain_decl
,
5322 /* If there's a tree_map, prepare for substitution. */
5324 for (i
= 0; i
< tree_map
->length (); i
++)
5327 replace_info
= (*tree_map
)[i
];
5328 if (replace_info
->replace_p
)
5330 if (!replace_info
->old_tree
)
5332 int i
= replace_info
->parm_num
;
5336 for (parm
= DECL_ARGUMENTS (old_decl
); i
; parm
= DECL_CHAIN (parm
))
5338 replace_info
->old_tree
= parm
;
5339 req_type
= TREE_TYPE (parm
);
5340 if (!useless_type_conversion_p (req_type
, TREE_TYPE (replace_info
->new_tree
)))
5342 if (fold_convertible_p (req_type
, replace_info
->new_tree
))
5343 replace_info
->new_tree
= fold_build1 (NOP_EXPR
, req_type
, replace_info
->new_tree
);
5344 else if (TYPE_SIZE (req_type
) == TYPE_SIZE (TREE_TYPE (replace_info
->new_tree
)))
5345 replace_info
->new_tree
= fold_build1 (VIEW_CONVERT_EXPR
, req_type
, replace_info
->new_tree
);
5350 fprintf (dump_file
, " const ");
5351 print_generic_expr (dump_file
, replace_info
->new_tree
, 0);
5352 fprintf (dump_file
, " can't be converted to param ");
5353 print_generic_expr (dump_file
, parm
, 0);
5354 fprintf (dump_file
, "\n");
5356 replace_info
->old_tree
= NULL
;
5361 gcc_assert (TREE_CODE (replace_info
->old_tree
) == PARM_DECL
);
5362 if (replace_info
->old_tree
)
5364 init
= setup_one_parameter (&id
, replace_info
->old_tree
,
5365 replace_info
->new_tree
, id
.src_fn
,
5369 init_stmts
.safe_push (init
);
5373 /* Copy the function's arguments. */
5374 if (DECL_ARGUMENTS (old_decl
) != NULL_TREE
)
5375 DECL_ARGUMENTS (new_decl
) =
5376 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl
), &id
,
5377 args_to_skip
, &vars
);
5379 DECL_INITIAL (new_decl
) = remap_blocks (DECL_INITIAL (id
.src_fn
), &id
);
5380 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl
)) = new_decl
;
5382 declare_inline_vars (DECL_INITIAL (new_decl
), vars
);
5384 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl
)->local_decls
))
5385 /* Add local vars. */
5386 add_local_variables (DECL_STRUCT_FUNCTION (old_decl
), cfun
, &id
);
5388 if (DECL_RESULT (old_decl
) == NULL_TREE
)
5390 else if (skip_return
&& !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl
))))
5392 DECL_RESULT (new_decl
)
5393 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl
)),
5394 RESULT_DECL
, NULL_TREE
, void_type_node
);
5395 DECL_CONTEXT (DECL_RESULT (new_decl
)) = new_decl
;
5396 cfun
->returns_struct
= 0;
5397 cfun
->returns_pcc_struct
= 0;
5402 DECL_RESULT (new_decl
) = remap_decl (DECL_RESULT (old_decl
), &id
);
5403 lang_hooks
.dup_lang_specific_decl (DECL_RESULT (new_decl
));
5404 if (gimple_in_ssa_p (id
.src_cfun
)
5405 && DECL_BY_REFERENCE (DECL_RESULT (old_decl
))
5406 && (old_name
= ssa_default_def (id
.src_cfun
, DECL_RESULT (old_decl
))))
5408 tree new_name
= make_ssa_name (DECL_RESULT (new_decl
), NULL
);
5409 insert_decl_map (&id
, old_name
, new_name
);
5410 SSA_NAME_DEF_STMT (new_name
) = gimple_build_nop ();
5411 set_ssa_default_def (cfun
, DECL_RESULT (new_decl
), new_name
);
5415 /* Set up the destination functions loop tree. */
5416 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl
)) != NULL
)
5418 cfun
->curr_properties
&= ~PROP_loops
;
5419 loop_optimizer_init (AVOID_CFG_MODIFICATIONS
);
5420 cfun
->curr_properties
|= PROP_loops
;
5423 /* Copy the Function's body. */
5424 copy_body (&id
, old_entry_block
->count
, REG_BR_PROB_BASE
,
5425 ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, new_entry
);
5427 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5428 number_blocks (new_decl
);
5430 /* We want to create the BB unconditionally, so that the addition of
5431 debug stmts doesn't affect BB count, which may in the end cause
5432 codegen differences. */
5433 bb
= split_edge (single_succ_edge (ENTRY_BLOCK_PTR
));
5434 while (init_stmts
.length ())
5435 insert_init_stmt (&id
, bb
, init_stmts
.pop ());
5436 update_clone_info (&id
);
5438 /* Remap the nonlocal_goto_save_area, if any. */
5439 if (cfun
->nonlocal_goto_save_area
)
5441 struct walk_stmt_info wi
;
5443 memset (&wi
, 0, sizeof (wi
));
5445 walk_tree (&cfun
->nonlocal_goto_save_area
, remap_gimple_op_r
, &wi
, NULL
);
5449 pointer_map_destroy (id
.decl_map
);
5451 pointer_map_destroy (id
.debug_map
);
5452 free_dominance_info (CDI_DOMINATORS
);
5453 free_dominance_info (CDI_POST_DOMINATORS
);
5455 fold_marked_statements (0, id
.statements_to_fold
);
5456 pointer_set_destroy (id
.statements_to_fold
);
5457 fold_cond_expr_cond ();
5458 delete_unreachable_blocks_update_callgraph (&id
);
5459 if (id
.dst_node
->definition
)
5460 cgraph_rebuild_references ();
5461 update_ssa (TODO_update_ssa
);
5463 /* After partial cloning we need to rescale frequencies, so they are
5464 within proper range in the cloned function. */
5467 struct cgraph_edge
*e
;
5468 rebuild_frequencies ();
5470 new_version_node
->count
= ENTRY_BLOCK_PTR
->count
;
5471 for (e
= new_version_node
->callees
; e
; e
= e
->next_callee
)
5473 basic_block bb
= gimple_bb (e
->call_stmt
);
5474 e
->frequency
= compute_call_stmt_bb_frequency (current_function_decl
,
5476 e
->count
= bb
->count
;
5478 for (e
= new_version_node
->indirect_calls
; e
; e
= e
->next_callee
)
5480 basic_block bb
= gimple_bb (e
->call_stmt
);
5481 e
->frequency
= compute_call_stmt_bb_frequency (current_function_decl
,
5483 e
->count
= bb
->count
;
5487 free_dominance_info (CDI_DOMINATORS
);
5488 free_dominance_info (CDI_POST_DOMINATORS
);
5490 gcc_assert (!id
.debug_stmts
.exists ());
5495 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
5496 the callee and return the inlined body on success. */
5499 maybe_inline_call_in_expr (tree exp
)
5501 tree fn
= get_callee_fndecl (exp
);
5503 /* We can only try to inline "const" functions. */
5504 if (fn
&& TREE_READONLY (fn
) && DECL_SAVED_TREE (fn
))
5506 struct pointer_map_t
*decl_map
= pointer_map_create ();
5507 call_expr_arg_iterator iter
;
5511 /* Remap the parameters. */
5512 for (param
= DECL_ARGUMENTS (fn
), arg
= first_call_expr_arg (exp
, &iter
);
5514 param
= DECL_CHAIN (param
), arg
= next_call_expr_arg (&iter
))
5515 *pointer_map_insert (decl_map
, param
) = arg
;
5517 memset (&id
, 0, sizeof (id
));
5519 id
.dst_fn
= current_function_decl
;
5520 id
.src_cfun
= DECL_STRUCT_FUNCTION (fn
);
5521 id
.decl_map
= decl_map
;
5523 id
.copy_decl
= copy_decl_no_change
;
5524 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
5525 id
.transform_new_cfg
= false;
5526 id
.transform_return_to_modify
= true;
5527 id
.transform_parameter
= true;
5528 id
.transform_lang_insert_block
= NULL
;
5530 /* Make sure not to unshare trees behind the front-end's back
5531 since front-end specific mechanisms may rely on sharing. */
5532 id
.regimplify
= false;
5533 id
.do_not_unshare
= true;
5535 /* We're not inside any EH region. */
5538 t
= copy_tree_body (&id
);
5539 pointer_map_destroy (decl_map
);
5541 /* We can only return something suitable for use in a GENERIC
5543 if (TREE_CODE (t
) == MODIFY_EXPR
)
5544 return TREE_OPERAND (t
, 1);
5550 /* Duplicate a type, fields and all. */
5553 build_duplicate_type (tree type
)
5555 struct copy_body_data id
;
5557 memset (&id
, 0, sizeof (id
));
5558 id
.src_fn
= current_function_decl
;
5559 id
.dst_fn
= current_function_decl
;
5561 id
.decl_map
= pointer_map_create ();
5562 id
.debug_map
= NULL
;
5563 id
.copy_decl
= copy_decl_no_change
;
5565 type
= remap_type_1 (type
, &id
);
5567 pointer_map_destroy (id
.decl_map
);
5569 pointer_map_destroy (id
.debug_map
);
5571 TYPE_CANONICAL (type
) = type
;