2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
42 #include "tree-iterator.h"
44 #include "gimple-fold.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
51 #include "tree-into-ssa.h"
57 #include "value-prof.h"
60 #include "stringpool.h"
64 /* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
67 /* Inlining, Cloning, Versioning, Parallelization
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements are adjusted accordingly.
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
81 Versioning: a function body is duplicated and the result is a new
82 function rather than into blocks of an existing function as with
83 inlining. Some parameters will become constants.
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
89 All of these will simultaneously lookup any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined) those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
/* To Do:

101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
are not needed.
108 o Provide heuristics to clamp inlining of recursive template
calls?  */
112 /* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */
115 eni_weights eni_size_weights
;
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
120 eni_weights eni_time_weights
;
124 static tree
declare_return_variable (copy_body_data
*, tree
, tree
,
126 static void remap_block (tree
*, copy_body_data
*);
127 static void copy_bind_expr (tree
*, int *, copy_body_data
*);
128 static void declare_inline_vars (tree
, tree
);
129 static void remap_save_expr (tree
*, hash_map
<tree
, tree
> *, int *);
130 static void prepend_lexical_block (tree current_block
, tree new_block
);
131 static tree
copy_decl_to_var (tree
, copy_body_data
*);
132 static tree
copy_result_decl_to_var (tree
, copy_body_data
*);
133 static tree
copy_decl_maybe_to_var (tree
, copy_body_data
*);
134 static gimple_seq
remap_gimple_stmt (gimple
*, copy_body_data
*);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data
*id
);
136 static void insert_init_stmt (copy_body_data
*, basic_block
, gimple
*);
138 /* Insert a tree->tree mapping for ID. Despite the name suggests
139 that the trees should be variables, it is used for more than that. */
/* KEY is the node from the source function; VALUE is its replacement
   in the copy.  Both entries below go into id->decl_map.  */
142 insert_decl_map (copy_body_data
*id
, tree key
, tree value
)
144 id
->decl_map
->put (key
, value
);
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
/* i.e. the VALUE->VALUE mapping makes the remapper treat nodes we
   just created as already remapped.  */
149 id
->decl_map
->put (value
, value
);
152 /* Insert a tree->tree mapping for ID. This is only used for
   debug statements (closing of this comment was lost in extraction).  */
/* Guards below: require the source function to be in SSA form, the
   destination function to enable -fvar-tracking-assignments, and KEY
   to be a valid debug-bind target.  KEY must be a PARM_DECL and VALUE
   a VAR_DECL (asserted).  id->debug_map is allocated lazily.  */
156 insert_debug_decl_map (copy_body_data
*id
, tree key
, tree value
)
158 if (!gimple_in_ssa_p (id
->src_cfun
))
161 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
164 if (!target_for_debug_bind (key
))
167 gcc_assert (TREE_CODE (key
) == PARM_DECL
);
168 gcc_assert (VAR_P (value
));
171 id
->debug_map
= new hash_map
<tree
, tree
>;
173 id
->debug_map
->put (key
, value
);
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
   context.  */
180 static int processing_debug_stmt
= 0;
182 /* Construct new SSA name for old NAME. ID is the inline context. */
185 remap_ssa_name (tree name
, copy_body_data
*id
)
190 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
192 n
= id
->decl_map
->get (name
);
194 return unshare_expr (*n
);
196 if (processing_debug_stmt
)
198 if (SSA_NAME_IS_DEFAULT_DEF (name
)
199 && TREE_CODE (SSA_NAME_VAR (name
)) == PARM_DECL
200 && id
->entry_bb
== NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)))
203 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
205 gimple_stmt_iterator gsi
;
206 tree val
= SSA_NAME_VAR (name
);
208 n
= id
->decl_map
->get (val
);
211 if (TREE_CODE (val
) != PARM_DECL
212 && !(VAR_P (val
) && DECL_ABSTRACT_ORIGIN (val
)))
214 processing_debug_stmt
= -1;
217 def_temp
= gimple_build_debug_source_bind (vexpr
, val
, NULL
);
218 DECL_ARTIFICIAL (vexpr
) = 1;
219 TREE_TYPE (vexpr
) = TREE_TYPE (name
);
220 SET_DECL_MODE (vexpr
, DECL_MODE (SSA_NAME_VAR (name
)));
221 gsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
222 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
226 processing_debug_stmt
= -1;
230 /* Remap anonymous SSA names or SSA names of anonymous decls. */
231 var
= SSA_NAME_VAR (name
);
233 || (!SSA_NAME_IS_DEFAULT_DEF (name
)
235 && !VAR_DECL_IS_VIRTUAL_OPERAND (var
)
236 && DECL_ARTIFICIAL (var
)
237 && DECL_IGNORED_P (var
)
238 && !DECL_NAME (var
)))
240 struct ptr_info_def
*pi
;
241 new_tree
= make_ssa_name (remap_type (TREE_TYPE (name
), id
));
242 if (!var
&& SSA_NAME_IDENTIFIER (name
))
243 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree
, SSA_NAME_IDENTIFIER (name
));
244 insert_decl_map (id
, name
, new_tree
);
245 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
246 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
247 /* At least IPA points-to info can be directly transferred. */
248 if (id
->src_cfun
->gimple_df
249 && id
->src_cfun
->gimple_df
->ipa_pta
250 && POINTER_TYPE_P (TREE_TYPE (name
))
251 && (pi
= SSA_NAME_PTR_INFO (name
))
254 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
260 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
262 new_tree
= remap_decl (var
, id
);
264 /* We might've substituted constant or another SSA_NAME for
267 Replace the SSA name representing RESULT_DECL by variable during
268 inlining: this saves us from need to introduce PHI node in a case
269 return value is just partly initialized. */
270 if ((VAR_P (new_tree
) || TREE_CODE (new_tree
) == PARM_DECL
)
271 && (!SSA_NAME_VAR (name
)
272 || TREE_CODE (SSA_NAME_VAR (name
)) != RESULT_DECL
273 || !id
->transform_return_to_modify
))
275 struct ptr_info_def
*pi
;
276 new_tree
= make_ssa_name (new_tree
);
277 insert_decl_map (id
, name
, new_tree
);
278 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
279 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
280 /* At least IPA points-to info can be directly transferred. */
281 if (id
->src_cfun
->gimple_df
282 && id
->src_cfun
->gimple_df
->ipa_pta
283 && POINTER_TYPE_P (TREE_TYPE (name
))
284 && (pi
= SSA_NAME_PTR_INFO (name
))
287 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
290 if (SSA_NAME_IS_DEFAULT_DEF (name
))
292 /* By inlining function having uninitialized variable, we might
293 extend the lifetime (variable might get reused). This cause
294 ICE in the case we end up extending lifetime of SSA name across
295 abnormal edge, but also increase register pressure.
297 We simply initialize all uninitialized vars by 0 except
298 for case we are inlining to very first BB. We can avoid
299 this for all BBs that are not inside strongly connected
300 regions of the CFG, but this is expensive to test. */
302 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
)
303 && (!SSA_NAME_VAR (name
)
304 || TREE_CODE (SSA_NAME_VAR (name
)) != PARM_DECL
)
305 && (id
->entry_bb
!= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
307 || EDGE_COUNT (id
->entry_bb
->preds
) != 1))
309 gimple_stmt_iterator gsi
= gsi_last_bb (id
->entry_bb
);
311 tree zero
= build_zero_cst (TREE_TYPE (new_tree
));
313 init_stmt
= gimple_build_assign (new_tree
, zero
);
314 gsi_insert_after (&gsi
, init_stmt
, GSI_NEW_STMT
);
315 SSA_NAME_IS_DEFAULT_DEF (new_tree
) = 0;
319 SSA_NAME_DEF_STMT (new_tree
) = gimple_build_nop ();
320 set_ssa_default_def (cfun
, SSA_NAME_VAR (new_tree
), new_tree
);
325 insert_decl_map (id
, name
, new_tree
);
329 /* Remap DECL during the copying of the BLOCK tree for the function. */
332 remap_decl (tree decl
, copy_body_data
*id
)
336 /* We only remap local variables in the current function. */
338 /* See if we have remapped this declaration. */
340 n
= id
->decl_map
->get (decl
);
342 if (!n
&& processing_debug_stmt
)
344 processing_debug_stmt
= -1;
348 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
349 necessary DECLs have already been remapped and we do not want to duplicate
350 a decl coming from outside of the sequence we are copying. */
352 && id
->prevent_decl_creation_for_types
353 && id
->remapping_type_depth
> 0
354 && (VAR_P (decl
) || TREE_CODE (decl
) == PARM_DECL
))
357 /* If we didn't already have an equivalent for this declaration, create one
361 /* Make a copy of the variable or label. */
362 tree t
= id
->copy_decl (decl
, id
);
364 /* Remember it, so that if we encounter this local entity again
365 we can reuse this copy. Do this early because remap_type may
366 need this decl for TYPE_STUB_DECL. */
367 insert_decl_map (id
, decl
, t
);
372 /* Remap types, if necessary. */
373 TREE_TYPE (t
) = remap_type (TREE_TYPE (t
), id
);
374 if (TREE_CODE (t
) == TYPE_DECL
)
376 DECL_ORIGINAL_TYPE (t
) = remap_type (DECL_ORIGINAL_TYPE (t
), id
);
378 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
379 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
380 is not set on the TYPE_DECL, for example in LTO mode. */
381 if (DECL_ORIGINAL_TYPE (t
) == TREE_TYPE (t
))
383 tree x
= build_variant_type_copy (TREE_TYPE (t
));
384 TYPE_STUB_DECL (x
) = TYPE_STUB_DECL (TREE_TYPE (t
));
385 TYPE_NAME (x
) = TYPE_NAME (TREE_TYPE (t
));
386 DECL_ORIGINAL_TYPE (t
) = x
;
390 /* Remap sizes as necessary. */
391 walk_tree (&DECL_SIZE (t
), copy_tree_body_r
, id
, NULL
);
392 walk_tree (&DECL_SIZE_UNIT (t
), copy_tree_body_r
, id
, NULL
);
394 /* If fields, do likewise for offset and qualifier. */
395 if (TREE_CODE (t
) == FIELD_DECL
)
397 walk_tree (&DECL_FIELD_OFFSET (t
), copy_tree_body_r
, id
, NULL
);
398 if (TREE_CODE (DECL_CONTEXT (t
)) == QUAL_UNION_TYPE
)
399 walk_tree (&DECL_QUALIFIER (t
), copy_tree_body_r
, id
, NULL
);
405 if (id
->do_not_unshare
)
408 return unshare_expr (*n
);
412 remap_type_1 (tree type
, copy_body_data
*id
)
416 /* We do need a copy. build and register it now. If this is a pointer or
417 reference type, remap the designated type and make a new pointer or
419 if (TREE_CODE (type
) == POINTER_TYPE
)
421 new_tree
= build_pointer_type_for_mode (remap_type (TREE_TYPE (type
), id
),
423 TYPE_REF_CAN_ALIAS_ALL (type
));
424 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
425 new_tree
= build_type_attribute_qual_variant (new_tree
,
426 TYPE_ATTRIBUTES (type
),
428 insert_decl_map (id
, type
, new_tree
);
431 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
433 new_tree
= build_reference_type_for_mode (remap_type (TREE_TYPE (type
), id
),
435 TYPE_REF_CAN_ALIAS_ALL (type
));
436 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
437 new_tree
= build_type_attribute_qual_variant (new_tree
,
438 TYPE_ATTRIBUTES (type
),
440 insert_decl_map (id
, type
, new_tree
);
444 new_tree
= copy_node (type
);
446 insert_decl_map (id
, type
, new_tree
);
448 /* This is a new type, not a copy of an old type. Need to reassociate
449 variants. We can handle everything except the main variant lazily. */
450 t
= TYPE_MAIN_VARIANT (type
);
453 t
= remap_type (t
, id
);
454 TYPE_MAIN_VARIANT (new_tree
) = t
;
455 TYPE_NEXT_VARIANT (new_tree
) = TYPE_NEXT_VARIANT (t
);
456 TYPE_NEXT_VARIANT (t
) = new_tree
;
460 TYPE_MAIN_VARIANT (new_tree
) = new_tree
;
461 TYPE_NEXT_VARIANT (new_tree
) = NULL
;
464 if (TYPE_STUB_DECL (type
))
465 TYPE_STUB_DECL (new_tree
) = remap_decl (TYPE_STUB_DECL (type
), id
);
467 /* Lazily create pointer and reference types. */
468 TYPE_POINTER_TO (new_tree
) = NULL
;
469 TYPE_REFERENCE_TO (new_tree
) = NULL
;
471 /* Copy all types that may contain references to local variables; be sure to
472 preserve sharing in between type and its main variant when possible. */
473 switch (TREE_CODE (new_tree
))
477 case FIXED_POINT_TYPE
:
480 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
482 gcc_checking_assert (TYPE_MIN_VALUE (type
) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type
)));
483 gcc_checking_assert (TYPE_MAX_VALUE (type
) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type
)));
485 TYPE_MIN_VALUE (new_tree
) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree
));
486 TYPE_MAX_VALUE (new_tree
) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree
));
490 t
= TYPE_MIN_VALUE (new_tree
);
491 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
492 walk_tree (&TYPE_MIN_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
494 t
= TYPE_MAX_VALUE (new_tree
);
495 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
496 walk_tree (&TYPE_MAX_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
501 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
502 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
503 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
505 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
506 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
507 && TYPE_ARG_TYPES (type
) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type
)))
508 TYPE_ARG_TYPES (new_tree
) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree
));
510 walk_tree (&TYPE_ARG_TYPES (new_tree
), copy_tree_body_r
, id
, NULL
);
514 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
515 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
516 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
518 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
520 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
522 gcc_checking_assert (TYPE_DOMAIN (type
) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type
)));
523 TYPE_DOMAIN (new_tree
) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree
));
526 TYPE_DOMAIN (new_tree
) = remap_type (TYPE_DOMAIN (new_tree
), id
);
531 case QUAL_UNION_TYPE
:
532 if (TYPE_MAIN_VARIANT (type
) != type
533 && TYPE_FIELDS (type
) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type
)))
534 TYPE_FIELDS (new_tree
) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree
));
539 for (f
= TYPE_FIELDS (new_tree
); f
; f
= DECL_CHAIN (f
))
541 t
= remap_decl (f
, id
);
542 DECL_CONTEXT (t
) = new_tree
;
546 TYPE_FIELDS (new_tree
) = nreverse (nf
);
552 /* Shouldn't have been thought variable sized. */
556 /* All variants of type share the same size, so use the already remaped data. */
557 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
559 tree s
= TYPE_SIZE (type
);
560 tree mvs
= TYPE_SIZE (TYPE_MAIN_VARIANT (type
));
561 tree su
= TYPE_SIZE_UNIT (type
);
562 tree mvsu
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
563 gcc_checking_assert ((TREE_CODE (s
) == PLACEHOLDER_EXPR
564 && (TREE_CODE (mvs
) == PLACEHOLDER_EXPR
))
566 gcc_checking_assert ((TREE_CODE (su
) == PLACEHOLDER_EXPR
567 && (TREE_CODE (mvsu
) == PLACEHOLDER_EXPR
))
569 TYPE_SIZE (new_tree
) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree
));
570 TYPE_SIZE_UNIT (new_tree
) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree
));
574 walk_tree (&TYPE_SIZE (new_tree
), copy_tree_body_r
, id
, NULL
);
575 walk_tree (&TYPE_SIZE_UNIT (new_tree
), copy_tree_body_r
, id
, NULL
);
/* Remap TYPE within the context ID: return the cached mapping from
   id->decl_map when present; map types that are not variably modified
   in the source function to themselves (identity mapping inserted);
   otherwise delegate the real work to remap_type_1, bracketed by
   remapping_type_depth bookkeeping.  NOTE(review): several lines of
   this definition (braces, returns) were lost in extraction.  */
582 remap_type (tree type
, copy_body_data
*id
)
590 /* See if we have remapped this type. */
591 node
= id
->decl_map
->get (type
);
595 /* The type only needs remapping if it's variably modified. */
596 if (! variably_modified_type_p (type
, id
->src_fn
))
598 insert_decl_map (id
, type
, type
);
602 id
->remapping_type_depth
++;
603 tmp
= remap_type_1 (type
, id
);
604 id
->remapping_type_depth
--;
609 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
/* True cases visible here: FUNCTION_DECLs (cannot be duplicated) and
   variables that are not automatic in the source function, e.g. local
   statics.  */
612 can_be_nonlocal (tree decl
, copy_body_data
*id
)
614 /* We can not duplicate function decls. */
615 if (TREE_CODE (decl
) == FUNCTION_DECL
)
618 /* Local static vars must be non-local or we get multiple declaration
   problems.  */
620 if (VAR_P (decl
) && !auto_var_in_fn_p (decl
, id
->src_fn
))
627 remap_decls (tree decls
, vec
<tree
, va_gc
> **nonlocalized_list
,
631 tree new_decls
= NULL_TREE
;
633 /* Remap its variables. */
634 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
638 if (can_be_nonlocal (old_var
, id
))
640 /* We need to add this variable to the local decls as otherwise
641 nothing else will do so. */
642 if (VAR_P (old_var
) && ! DECL_EXTERNAL (old_var
) && cfun
)
643 add_local_decl (cfun
, old_var
);
644 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
645 && !DECL_IGNORED_P (old_var
)
646 && nonlocalized_list
)
647 vec_safe_push (*nonlocalized_list
, old_var
);
651 /* Remap the variable. */
652 new_var
= remap_decl (old_var
, id
);
654 /* If we didn't remap this variable, we can't mess with its
655 TREE_CHAIN. If we remapped this variable to the return slot, it's
656 already declared somewhere else, so don't declare it here. */
658 if (new_var
== id
->retvar
)
662 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
663 && !DECL_IGNORED_P (old_var
)
664 && nonlocalized_list
)
665 vec_safe_push (*nonlocalized_list
, old_var
);
669 gcc_assert (DECL_P (new_var
));
670 DECL_CHAIN (new_var
) = new_decls
;
673 /* Also copy value-expressions. */
674 if (VAR_P (new_var
) && DECL_HAS_VALUE_EXPR_P (new_var
))
676 tree tem
= DECL_VALUE_EXPR (new_var
);
677 bool old_regimplify
= id
->regimplify
;
678 id
->remapping_type_depth
++;
679 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
680 id
->remapping_type_depth
--;
681 id
->regimplify
= old_regimplify
;
682 SET_DECL_VALUE_EXPR (new_var
, tem
);
687 return nreverse (new_decls
);
690 /* Copy the BLOCK to contain remapped versions of the variables
691 therein. And hook the new block into the block-tree. */
/* *BLOCK is the source block; a fresh BLOCK node is built, its flags,
   source location and nonlocalized-vars vector copied, its variables
   remapped via remap_decls, and the old->new mapping recorded via
   insert_decl_map so later lookups (e.g. remap_location) find it.  */
694 remap_block (tree
*block
, copy_body_data
*id
)
699 /* Make the new block. */
701 new_block
= make_node (BLOCK
);
702 TREE_USED (new_block
) = TREE_USED (old_block
);
703 BLOCK_ABSTRACT_ORIGIN (new_block
) = old_block
;
704 BLOCK_SOURCE_LOCATION (new_block
) = BLOCK_SOURCE_LOCATION (old_block
);
705 BLOCK_NONLOCALIZED_VARS (new_block
)
706 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block
));
709 /* Remap its variables. */
710 BLOCK_VARS (new_block
) = remap_decls (BLOCK_VARS (old_block
),
711 &BLOCK_NONLOCALIZED_VARS (new_block
),
/* Give the language a chance to hook the block in (frontend hook).  */
714 if (id
->transform_lang_insert_block
)
715 id
->transform_lang_insert_block (new_block
);
717 /* Remember the remapped block. */
718 insert_decl_map (id
, old_block
, new_block
);
721 /* Copy the whole block tree and root it in id->block. */
/* Recursive: remap this BLOCK, then prepend each remapped subblock;
   since prepending reverses order, blocks_nreverse restores it.  */
724 remap_blocks (tree block
, copy_body_data
*id
)
727 tree new_tree
= block
;
732 remap_block (&new_tree
, id
);
733 gcc_assert (new_tree
!= block
);
734 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
735 prepend_lexical_block (new_tree
, remap_blocks (t
, id
));
736 /* Blocks are in arbitrary order, but make things slightly prettier and do
737 not swap order when producing a copy. */
738 BLOCK_SUBBLOCKS (new_tree
) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree
));
742 /* Remap the block tree rooted at BLOCK to nothing. */
/* Maps BLOCK and, recursively, every subblock to NULL_TREE so later
   lookups drop them.  */
745 remap_blocks_to_null (tree block
, copy_body_data
*id
)
748 insert_decl_map (id
, block
, NULL_TREE
);
749 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
750 remap_blocks_to_null (t
, id
);
753 /* Remap the location info pointed to by LOCUS. */
/* If LOCUS carries a block, look the block up in id->decl_map and
   rebuild the location against the remapped block; otherwise strip to
   the bare location and, when ID supplies a block, attach that.  */
756 remap_location (location_t locus
, copy_body_data
*id
)
758 if (LOCATION_BLOCK (locus
))
760 tree
*n
= id
->decl_map
->get (LOCATION_BLOCK (locus
));
763 return set_block (locus
, *n
);
766 locus
= LOCATION_LOCUS (locus
);
768 if (locus
!= UNKNOWN_LOCATION
&& id
->block
)
769 return set_block (locus
, id
->block
);
/* Replace the STATEMENT_LIST at *TP with a freshly allocated copy,
   recursing into nested STATEMENT_LISTs so the originals are never
   spliced by tsi_link_after.  */
775 copy_statement_list (tree
*tp
)
777 tree_stmt_iterator oi
, ni
;
780 new_tree
= alloc_stmt_list ();
781 ni
= tsi_start (new_tree
);
782 oi
= tsi_start (*tp
);
783 TREE_TYPE (new_tree
) = TREE_TYPE (*tp
);
786 for (; !tsi_end_p (oi
); tsi_next (&oi
))
788 tree stmt
= tsi_stmt (oi
);
789 if (TREE_CODE (stmt
) == STATEMENT_LIST
)
790 /* This copy is not redundant; tsi_link_after will smash this
791 STATEMENT_LIST into the end of the one we're building, and we
792 don't want to do that with the original. */
793 copy_statement_list (&stmt
);
794 tsi_link_after (&ni
, stmt
, TSI_CONTINUE_LINKING
);
/* Copy the BIND_EXPR at *TP via copy_tree_r, remap its BLOCK with
   remap_block, and remap its variable chain with remap_decls.  */
799 copy_bind_expr (tree
*tp
, int *walk_subtrees
, copy_body_data
*id
)
801 tree block
= BIND_EXPR_BLOCK (*tp
);
802 /* Copy (and replace) the statement. */
803 copy_tree_r (tp
, walk_subtrees
, NULL
);
806 remap_block (&block
, id
);
807 BIND_EXPR_BLOCK (*tp
) = block
;
810 if (BIND_EXPR_VARS (*tp
))
811 /* This will remap a lot of the same decls again, but this should be
   harmless.  */
813 BIND_EXPR_VARS (*tp
) = remap_decls (BIND_EXPR_VARS (*tp
), NULL
, id
);
817 /* Create a new gimple_seq by remapping all the statements in BODY
818 using the inlining information in ID. */
/* Iterates BODY with a gimple_stmt_iterator; each statement is
   remapped by remap_gimple_stmt (which may yield a sequence) and the
   results concatenated into NEW_BODY.  */
821 remap_gimple_seq (gimple_seq body
, copy_body_data
*id
)
823 gimple_stmt_iterator si
;
824 gimple_seq new_body
= NULL
;
826 for (si
= gsi_start (body
); !gsi_end_p (si
); gsi_next (&si
))
828 gimple_seq new_stmts
= remap_gimple_stmt (gsi_stmt (si
), id
);
829 gimple_seq_add_seq (&new_body
, new_stmts
);
836 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
837 block using the mapping information in ID. */
/* Remaps the bind's body (remap_gimple_seq), its block (remap_block)
   and its variable chain (remap_decls), then builds a fresh
   GIMPLE_BIND from the three remapped pieces.  */
840 copy_gimple_bind (gbind
*stmt
, copy_body_data
*id
)
843 tree new_block
, new_vars
;
844 gimple_seq body
, new_body
;
846 /* Copy the statement. Note that we purposely don't use copy_stmt
847 here because we need to remap statements as we copy. */
848 body
= gimple_bind_body (stmt
);
849 new_body
= remap_gimple_seq (body
, id
);
851 new_block
= gimple_bind_block (stmt
);
853 remap_block (&new_block
, id
);
855 /* This will remap a lot of the same decls again, but this should be
   harmless.  */
857 new_vars
= gimple_bind_vars (stmt
);
859 new_vars
= remap_decls (new_vars
, NULL
, id
);
861 new_bind
= gimple_build_bind (new_vars
, new_body
, new_block
);
866 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
/* NOTE(review): the signature line of this predicate was lost in
   extraction; later callers invoke it as is_parm (...).  An SSA_NAME
   is first resolved to its underlying variable.  */
871 if (TREE_CODE (decl
) == SSA_NAME
)
873 decl
= SSA_NAME_VAR (decl
);
878 return (TREE_CODE (decl
) == PARM_DECL
);
881 /* Remap the dependence CLIQUE from the source to the destination function
882 as specified in ID. */
/* Clique 0 (and any clique seen while processing debug statements) is
   passed through.  Otherwise a lazily-allocated id->dependence_map
   memoizes source->destination clique numbers; a fresh number is drawn
   from ++cfun->last_clique on first sight.  */
884 static unsigned short
885 remap_dependence_clique (copy_body_data
*id
, unsigned short clique
)
887 if (clique
== 0 || processing_debug_stmt
)
889 if (!id
->dependence_map
)
890 id
->dependence_map
= new hash_map
<dependence_hash
, unsigned short>;
892 unsigned short &newc
= id
->dependence_map
->get_or_insert (clique
, &existed
);
894 newc
= ++cfun
->last_clique
;
898 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
899 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
900 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
901 recursing into the children nodes of *TP. */
904 remap_gimple_op_r (tree
*tp
, int *walk_subtrees
, void *data
)
906 struct walk_stmt_info
*wi_p
= (struct walk_stmt_info
*) data
;
907 copy_body_data
*id
= (copy_body_data
*) wi_p
->info
;
908 tree fn
= id
->src_fn
;
910 /* For recursive invocations this is no longer the LHS itself. */
911 bool is_lhs
= wi_p
->is_lhs
;
912 wi_p
->is_lhs
= false;
914 if (TREE_CODE (*tp
) == SSA_NAME
)
916 *tp
= remap_ssa_name (*tp
, id
);
919 SSA_NAME_DEF_STMT (*tp
) = wi_p
->stmt
;
922 else if (auto_var_in_fn_p (*tp
, fn
))
924 /* Local variables and labels need to be replaced by equivalent
925 variables. We don't want to copy static variables; there's
926 only one of those, no matter how many times we inline the
927 containing function. Similarly for globals from an outer
931 /* Remap the declaration. */
932 new_decl
= remap_decl (*tp
, id
);
933 gcc_assert (new_decl
);
934 /* Replace this variable with the copy. */
935 STRIP_TYPE_NOPS (new_decl
);
936 /* ??? The C++ frontend uses void * pointer zero to initialize
937 any other type. This confuses the middle-end type verification.
938 As cloned bodies do not go through gimplification again the fixup
939 there doesn't trigger. */
940 if (TREE_CODE (new_decl
) == INTEGER_CST
941 && !useless_type_conversion_p (TREE_TYPE (*tp
), TREE_TYPE (new_decl
)))
942 new_decl
= fold_convert (TREE_TYPE (*tp
), new_decl
);
946 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
948 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
950 else if (TREE_CODE (*tp
) == LABEL_DECL
951 && (!DECL_CONTEXT (*tp
)
952 || decl_function_context (*tp
) == id
->src_fn
))
953 /* These may need to be remapped for EH handling. */
954 *tp
= remap_decl (*tp
, id
);
955 else if (TREE_CODE (*tp
) == FIELD_DECL
)
957 /* If the enclosing record type is variably_modified_type_p, the field
958 has already been remapped. Otherwise, it need not be. */
959 tree
*n
= id
->decl_map
->get (*tp
);
964 else if (TYPE_P (*tp
))
965 /* Types may need remapping as well. */
966 *tp
= remap_type (*tp
, id
);
967 else if (CONSTANT_CLASS_P (*tp
))
969 /* If this is a constant, we have to copy the node iff the type
970 will be remapped. copy_tree_r will not copy a constant. */
971 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
973 if (new_type
== TREE_TYPE (*tp
))
976 else if (TREE_CODE (*tp
) == INTEGER_CST
)
977 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
980 *tp
= copy_node (*tp
);
981 TREE_TYPE (*tp
) = new_type
;
986 /* Otherwise, just copy the node. Note that copy_tree_r already
987 knows not to copy VAR_DECLs, etc., so this is safe. */
989 if (TREE_CODE (*tp
) == MEM_REF
)
991 /* We need to re-canonicalize MEM_REFs from inline substitutions
992 that can happen when a pointer argument is an ADDR_EXPR.
993 Recurse here manually to allow that. */
994 tree ptr
= TREE_OPERAND (*tp
, 0);
995 tree type
= remap_type (TREE_TYPE (*tp
), id
);
997 walk_tree (&ptr
, remap_gimple_op_r
, data
, NULL
);
998 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
999 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1000 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1001 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1002 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1004 MR_DEPENDENCE_CLIQUE (*tp
)
1005 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1006 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1008 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1009 remapped a parameter as the property might be valid only
1010 for the parameter itself. */
1011 if (TREE_THIS_NOTRAP (old
)
1012 && (!is_parm (TREE_OPERAND (old
, 0))
1013 || (!id
->transform_parameter
&& is_parm (ptr
))))
1014 TREE_THIS_NOTRAP (*tp
) = 1;
1015 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1020 /* Here is the "usual case". Copy this tree node, and then
1021 tweak some special cases. */
1022 copy_tree_r (tp
, walk_subtrees
, NULL
);
1024 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1025 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1027 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1029 /* The copied TARGET_EXPR has never been expanded, even if the
1030 original node was expanded already. */
1031 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1032 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1034 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1036 /* Variable substitution need not be simple. In particular,
1037 the MEM_REF substitution above. Make sure that
1038 TREE_CONSTANT and friends are up-to-date. */
1039 int invariant
= is_gimple_min_invariant (*tp
);
1040 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
1041 recompute_tree_invariant_for_addr_expr (*tp
);
1043 /* If this used to be invariant, but is not any longer,
1044 then regimplification is probably needed. */
1045 if (invariant
&& !is_gimple_min_invariant (*tp
))
1046 id
->regimplify
= true;
1052 /* Update the TREE_BLOCK for the cloned expr. */
1055 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1056 tree old_block
= TREE_BLOCK (*tp
);
1060 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1064 TREE_SET_BLOCK (*tp
, new_block
);
1067 /* Keep iterating. */
1072 /* Called from copy_body_id via walk_tree. DATA is really a
1073 `copy_body_data *'. */
1076 copy_tree_body_r (tree
*tp
, int *walk_subtrees
, void *data
)
1078 copy_body_data
*id
= (copy_body_data
*) data
;
1079 tree fn
= id
->src_fn
;
1082 /* Begin by recognizing trees that we'll completely rewrite for the
1083 inlining context. Our output for these trees is completely
1084 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1085 into an edge). Further down, we'll handle trees that get
1086 duplicated and/or tweaked. */
1088 /* When requested, RETURN_EXPRs should be transformed to just the
1089 contained MODIFY_EXPR. The branch semantics of the return will
1090 be handled elsewhere by manipulating the CFG rather than a statement. */
1091 if (TREE_CODE (*tp
) == RETURN_EXPR
&& id
->transform_return_to_modify
)
1093 tree assignment
= TREE_OPERAND (*tp
, 0);
1095 /* If we're returning something, just turn that into an
1096 assignment into the equivalent of the original RESULT_DECL.
1097 If the "assignment" is just the result decl, the result
1098 decl has already been set (e.g. a recent "foo (&result_decl,
1099 ...)"); just toss the entire RETURN_EXPR. */
1100 if (assignment
&& TREE_CODE (assignment
) == MODIFY_EXPR
)
1102 /* Replace the RETURN_EXPR with (a copy of) the
1103 MODIFY_EXPR hanging underneath. */
1104 *tp
= copy_node (assignment
);
1106 else /* Else the RETURN_EXPR returns no value. */
1109 return (tree
) (void *)1;
1112 else if (TREE_CODE (*tp
) == SSA_NAME
)
1114 *tp
= remap_ssa_name (*tp
, id
);
1119 /* Local variables and labels need to be replaced by equivalent
1120 variables. We don't want to copy static variables; there's only
1121 one of those, no matter how many times we inline the containing
1122 function. Similarly for globals from an outer function. */
1123 else if (auto_var_in_fn_p (*tp
, fn
))
1127 /* Remap the declaration. */
1128 new_decl
= remap_decl (*tp
, id
);
1129 gcc_assert (new_decl
);
1130 /* Replace this variable with the copy. */
1131 STRIP_TYPE_NOPS (new_decl
);
1135 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1136 copy_statement_list (tp
);
1137 else if (TREE_CODE (*tp
) == SAVE_EXPR
1138 || TREE_CODE (*tp
) == TARGET_EXPR
)
1139 remap_save_expr (tp
, id
->decl_map
, walk_subtrees
);
1140 else if (TREE_CODE (*tp
) == LABEL_DECL
1141 && (! DECL_CONTEXT (*tp
)
1142 || decl_function_context (*tp
) == id
->src_fn
))
1143 /* These may need to be remapped for EH handling. */
1144 *tp
= remap_decl (*tp
, id
);
1145 else if (TREE_CODE (*tp
) == BIND_EXPR
)
1146 copy_bind_expr (tp
, walk_subtrees
, id
);
1147 /* Types may need remapping as well. */
1148 else if (TYPE_P (*tp
))
1149 *tp
= remap_type (*tp
, id
);
1151 /* If this is a constant, we have to copy the node iff the type will be
1152 remapped. copy_tree_r will not copy a constant. */
1153 else if (CONSTANT_CLASS_P (*tp
))
1155 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1157 if (new_type
== TREE_TYPE (*tp
))
1160 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1161 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
1164 *tp
= copy_node (*tp
);
1165 TREE_TYPE (*tp
) = new_type
;
1169 /* Otherwise, just copy the node. Note that copy_tree_r already
1170 knows not to copy VAR_DECLs, etc., so this is safe. */
1173 /* Here we handle trees that are not completely rewritten.
1174 First we detect some inlining-induced bogosities for
1176 if (TREE_CODE (*tp
) == MODIFY_EXPR
1177 && TREE_OPERAND (*tp
, 0) == TREE_OPERAND (*tp
, 1)
1178 && (auto_var_in_fn_p (TREE_OPERAND (*tp
, 0), fn
)))
1180 /* Some assignments VAR = VAR; don't generate any rtl code
1181 and thus don't count as variable modification. Avoid
1182 keeping bogosities like 0 = 0. */
1183 tree decl
= TREE_OPERAND (*tp
, 0), value
;
1186 n
= id
->decl_map
->get (decl
);
1190 STRIP_TYPE_NOPS (value
);
1191 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1193 *tp
= build_empty_stmt (EXPR_LOCATION (*tp
));
1194 return copy_tree_body_r (tp
, walk_subtrees
, data
);
1198 else if (TREE_CODE (*tp
) == INDIRECT_REF
)
1200 /* Get rid of *& from inline substitutions that can happen when a
1201 pointer argument is an ADDR_EXPR. */
1202 tree decl
= TREE_OPERAND (*tp
, 0);
1203 tree
*n
= id
->decl_map
->get (decl
);
1206 /* If we happen to get an ADDR_EXPR in n->value, strip
1207 it manually here as we'll eventually get ADDR_EXPRs
1208 which lie about their types pointed to. In this case
1209 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1210 but we absolutely rely on that. As fold_indirect_ref
1211 does other useful transformations, try that first, though. */
1212 tree type
= TREE_TYPE (*tp
);
1213 tree ptr
= id
->do_not_unshare
? *n
: unshare_expr (*n
);
1215 *tp
= gimple_fold_indirect_ref (ptr
);
1218 type
= remap_type (type
, id
);
1219 if (TREE_CODE (ptr
) == ADDR_EXPR
)
1222 = fold_indirect_ref_1 (EXPR_LOCATION (ptr
), type
, ptr
);
1223 /* ??? We should either assert here or build
1224 a VIEW_CONVERT_EXPR instead of blindly leaking
1225 incompatible types to our IL. */
1227 *tp
= TREE_OPERAND (ptr
, 0);
1231 *tp
= build1 (INDIRECT_REF
, type
, ptr
);
1232 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1233 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1234 TREE_READONLY (*tp
) = TREE_READONLY (old
);
1235 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1236 have remapped a parameter as the property might be
1237 valid only for the parameter itself. */
1238 if (TREE_THIS_NOTRAP (old
)
1239 && (!is_parm (TREE_OPERAND (old
, 0))
1240 || (!id
->transform_parameter
&& is_parm (ptr
))))
1241 TREE_THIS_NOTRAP (*tp
) = 1;
1248 else if (TREE_CODE (*tp
) == MEM_REF
)
1250 /* We need to re-canonicalize MEM_REFs from inline substitutions
1251 that can happen when a pointer argument is an ADDR_EXPR.
1252 Recurse here manually to allow that. */
1253 tree ptr
= TREE_OPERAND (*tp
, 0);
1254 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1256 walk_tree (&ptr
, copy_tree_body_r
, data
, NULL
);
1257 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1258 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1259 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1260 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1261 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1263 MR_DEPENDENCE_CLIQUE (*tp
)
1264 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1265 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1267 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1268 remapped a parameter as the property might be valid only
1269 for the parameter itself. */
1270 if (TREE_THIS_NOTRAP (old
)
1271 && (!is_parm (TREE_OPERAND (old
, 0))
1272 || (!id
->transform_parameter
&& is_parm (ptr
))))
1273 TREE_THIS_NOTRAP (*tp
) = 1;
1274 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1279 /* Here is the "usual case". Copy this tree node, and then
1280 tweak some special cases. */
1281 copy_tree_r (tp
, walk_subtrees
, NULL
);
1283 /* If EXPR has block defined, map it to newly constructed block.
1284 When inlining we want EXPRs without block appear in the block
1285 of function call if we are not remapping a type. */
1288 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1289 if (TREE_BLOCK (*tp
))
1292 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1296 TREE_SET_BLOCK (*tp
, new_block
);
1299 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1300 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1302 /* The copied TARGET_EXPR has never been expanded, even if the
1303 original node was expanded already. */
1304 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1306 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1307 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1310 /* Variable substitution need not be simple. In particular, the
1311 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1312 and friends are up-to-date. */
1313 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1315 int invariant
= is_gimple_min_invariant (*tp
);
1316 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1318 /* Handle the case where we substituted an INDIRECT_REF
1319 into the operand of the ADDR_EXPR. */
1320 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
)
1322 tree t
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1323 if (TREE_TYPE (t
) != TREE_TYPE (*tp
))
1324 t
= fold_convert (remap_type (TREE_TYPE (*tp
), id
), t
);
1328 recompute_tree_invariant_for_addr_expr (*tp
);
1330 /* If this used to be invariant, but is not any longer,
1331 then regimplification is probably needed. */
1332 if (invariant
&& !is_gimple_min_invariant (*tp
))
1333 id
->regimplify
= true;
1339 /* Keep iterating. */
1343 /* Helper for remap_gimple_stmt. Given an EH region number for the
1344 source function, map that to the duplicate EH region number in
1345 the destination function. */
1348 remap_eh_region_nr (int old_nr
, copy_body_data
*id
)
1350 eh_region old_r
, new_r
;
1352 old_r
= get_eh_region_from_number_fn (id
->src_cfun
, old_nr
);
1353 new_r
= static_cast<eh_region
> (*id
->eh_map
->get (old_r
));
1355 return new_r
->index
;
1358 /* Similar, but operate on INTEGER_CSTs. */
1361 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1365 old_nr
= tree_to_shwi (old_t_nr
);
1366 new_nr
= remap_eh_region_nr (old_nr
, id
);
1368 return build_int_cst (integer_type_node
, new_nr
);
1371 /* Helper for copy_bb. Remap statement STMT using the inlining
1372 information in ID. Return the new statement copy. */
1375 remap_gimple_stmt (gimple
*stmt
, copy_body_data
*id
)
1377 gimple
*copy
= NULL
;
1378 struct walk_stmt_info wi
;
1379 bool skip_first
= false;
1380 gimple_seq stmts
= NULL
;
1382 if (is_gimple_debug (stmt
)
1383 && (gimple_debug_nonbind_marker_p (stmt
)
1384 ? !DECL_STRUCT_FUNCTION (id
->dst_fn
)->debug_nonbind_markers
1385 : !opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
)))
1388 /* Begin by recognizing trees that we'll completely rewrite for the
1389 inlining context. Our output for these trees is completely
1390 different from our input (e.g. RETURN_EXPR is deleted and morphs
1391 into an edge). Further down, we'll handle trees that get
1392 duplicated and/or tweaked. */
1394 /* When requested, GIMPLE_RETURN should be transformed to just the
1395 contained GIMPLE_ASSIGN. The branch semantics of the return will
1396 be handled elsewhere by manipulating the CFG rather than the
1398 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1400 tree retval
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1402 /* If we're returning something, just turn that into an
1403 assignment to the equivalent of the original RESULT_DECL.
1404 If RETVAL is just the result decl, the result decl has
1405 already been set (e.g. a recent "foo (&result_decl, ...)");
1406 just toss the entire GIMPLE_RETURN. */
1408 && (TREE_CODE (retval
) != RESULT_DECL
1409 && (TREE_CODE (retval
) != SSA_NAME
1410 || ! SSA_NAME_VAR (retval
)
1411 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1413 copy
= gimple_build_assign (id
->do_not_unshare
1414 ? id
->retvar
: unshare_expr (id
->retvar
),
1416 /* id->retvar is already substituted. Skip it on later remapping. */
1422 else if (gimple_has_substatements (stmt
))
1426 /* When cloning bodies from the C++ front end, we will be handed bodies
1427 in High GIMPLE form. Handle here all the High GIMPLE statements that
1428 have embedded statements. */
1429 switch (gimple_code (stmt
))
1432 copy
= copy_gimple_bind (as_a
<gbind
*> (stmt
), id
);
1437 gcatch
*catch_stmt
= as_a
<gcatch
*> (stmt
);
1438 s1
= remap_gimple_seq (gimple_catch_handler (catch_stmt
), id
);
1439 copy
= gimple_build_catch (gimple_catch_types (catch_stmt
), s1
);
1443 case GIMPLE_EH_FILTER
:
1444 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1445 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1449 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1450 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1451 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1454 case GIMPLE_WITH_CLEANUP_EXPR
:
1455 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1456 copy
= gimple_build_wce (s1
);
1459 case GIMPLE_OMP_PARALLEL
:
1461 gomp_parallel
*omp_par_stmt
= as_a
<gomp_parallel
*> (stmt
);
1462 s1
= remap_gimple_seq (gimple_omp_body (omp_par_stmt
), id
);
1463 copy
= gimple_build_omp_parallel
1465 gimple_omp_parallel_clauses (omp_par_stmt
),
1466 gimple_omp_parallel_child_fn (omp_par_stmt
),
1467 gimple_omp_parallel_data_arg (omp_par_stmt
));
1471 case GIMPLE_OMP_TASK
:
1472 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1473 copy
= gimple_build_omp_task
1475 gimple_omp_task_clauses (stmt
),
1476 gimple_omp_task_child_fn (stmt
),
1477 gimple_omp_task_data_arg (stmt
),
1478 gimple_omp_task_copy_fn (stmt
),
1479 gimple_omp_task_arg_size (stmt
),
1480 gimple_omp_task_arg_align (stmt
));
1483 case GIMPLE_OMP_FOR
:
1484 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1485 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1486 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1487 gimple_omp_for_clauses (stmt
),
1488 gimple_omp_for_collapse (stmt
), s2
);
1491 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1493 gimple_omp_for_set_index (copy
, i
,
1494 gimple_omp_for_index (stmt
, i
));
1495 gimple_omp_for_set_initial (copy
, i
,
1496 gimple_omp_for_initial (stmt
, i
));
1497 gimple_omp_for_set_final (copy
, i
,
1498 gimple_omp_for_final (stmt
, i
));
1499 gimple_omp_for_set_incr (copy
, i
,
1500 gimple_omp_for_incr (stmt
, i
));
1501 gimple_omp_for_set_cond (copy
, i
,
1502 gimple_omp_for_cond (stmt
, i
));
1507 case GIMPLE_OMP_MASTER
:
1508 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1509 copy
= gimple_build_omp_master (s1
);
1512 case GIMPLE_OMP_TASKGROUP
:
1513 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1514 copy
= gimple_build_omp_taskgroup (s1
);
1517 case GIMPLE_OMP_ORDERED
:
1518 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1519 copy
= gimple_build_omp_ordered
1521 gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
)));
1524 case GIMPLE_OMP_SECTION
:
1525 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1526 copy
= gimple_build_omp_section (s1
);
1529 case GIMPLE_OMP_SECTIONS
:
1530 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1531 copy
= gimple_build_omp_sections
1532 (s1
, gimple_omp_sections_clauses (stmt
));
1535 case GIMPLE_OMP_SINGLE
:
1536 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1537 copy
= gimple_build_omp_single
1538 (s1
, gimple_omp_single_clauses (stmt
));
1541 case GIMPLE_OMP_TARGET
:
1542 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1543 copy
= gimple_build_omp_target
1544 (s1
, gimple_omp_target_kind (stmt
),
1545 gimple_omp_target_clauses (stmt
));
1548 case GIMPLE_OMP_TEAMS
:
1549 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1550 copy
= gimple_build_omp_teams
1551 (s1
, gimple_omp_teams_clauses (stmt
));
1554 case GIMPLE_OMP_CRITICAL
:
1555 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1556 copy
= gimple_build_omp_critical (s1
,
1557 gimple_omp_critical_name
1558 (as_a
<gomp_critical
*> (stmt
)),
1559 gimple_omp_critical_clauses
1560 (as_a
<gomp_critical
*> (stmt
)));
1563 case GIMPLE_TRANSACTION
:
1565 gtransaction
*old_trans_stmt
= as_a
<gtransaction
*> (stmt
);
1566 gtransaction
*new_trans_stmt
;
1567 s1
= remap_gimple_seq (gimple_transaction_body (old_trans_stmt
),
1569 copy
= new_trans_stmt
= gimple_build_transaction (s1
);
1570 gimple_transaction_set_subcode (new_trans_stmt
,
1571 gimple_transaction_subcode (old_trans_stmt
));
1572 gimple_transaction_set_label_norm (new_trans_stmt
,
1573 gimple_transaction_label_norm (old_trans_stmt
));
1574 gimple_transaction_set_label_uninst (new_trans_stmt
,
1575 gimple_transaction_label_uninst (old_trans_stmt
));
1576 gimple_transaction_set_label_over (new_trans_stmt
,
1577 gimple_transaction_label_over (old_trans_stmt
));
1587 if (gimple_assign_copy_p (stmt
)
1588 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1589 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1591 /* Here we handle statements that are not completely rewritten.
1592 First we detect some inlining-induced bogosities for
1595 /* Some assignments VAR = VAR; don't generate any rtl code
1596 and thus don't count as variable modification. Avoid
1597 keeping bogosities like 0 = 0. */
1598 tree decl
= gimple_assign_lhs (stmt
), value
;
1601 n
= id
->decl_map
->get (decl
);
1605 STRIP_TYPE_NOPS (value
);
1606 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1611 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1612 in a block that we aren't copying during tree_function_versioning,
1613 just drop the clobber stmt. */
1614 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1616 tree lhs
= gimple_assign_lhs (stmt
);
1617 if (TREE_CODE (lhs
) == MEM_REF
1618 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1620 gimple
*def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1621 if (gimple_bb (def_stmt
)
1622 && !bitmap_bit_p (id
->blocks_to_copy
,
1623 gimple_bb (def_stmt
)->index
))
1628 if (gimple_debug_bind_p (stmt
))
1631 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt
),
1632 gimple_debug_bind_get_value (stmt
),
1634 if (id
->reset_location
)
1635 gimple_set_location (copy
, input_location
);
1636 id
->debug_stmts
.safe_push (copy
);
1637 gimple_seq_add_stmt (&stmts
, copy
);
1640 if (gimple_debug_source_bind_p (stmt
))
1642 gdebug
*copy
= gimple_build_debug_source_bind
1643 (gimple_debug_source_bind_get_var (stmt
),
1644 gimple_debug_source_bind_get_value (stmt
),
1646 if (id
->reset_location
)
1647 gimple_set_location (copy
, input_location
);
1648 id
->debug_stmts
.safe_push (copy
);
1649 gimple_seq_add_stmt (&stmts
, copy
);
1652 if (gimple_debug_nonbind_marker_p (stmt
))
1654 /* If the inlined function has too many debug markers,
1656 if (id
->src_cfun
->debug_marker_count
1657 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT
))
1660 gdebug
*copy
= as_a
<gdebug
*> (gimple_copy (stmt
));
1661 if (id
->reset_location
)
1662 gimple_set_location (copy
, input_location
);
1663 id
->debug_stmts
.safe_push (copy
);
1664 gimple_seq_add_stmt (&stmts
, copy
);
1668 /* Create a new deep copy of the statement. */
1669 copy
= gimple_copy (stmt
);
1671 /* Clear flags that need revisiting. */
1672 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (copy
))
1674 if (gimple_call_tail_p (call_stmt
))
1675 gimple_call_set_tail (call_stmt
, false);
1676 if (gimple_call_from_thunk_p (call_stmt
))
1677 gimple_call_set_from_thunk (call_stmt
, false);
1678 if (gimple_call_internal_p (call_stmt
))
1679 switch (gimple_call_internal_fn (call_stmt
))
1681 case IFN_GOMP_SIMD_LANE
:
1682 case IFN_GOMP_SIMD_VF
:
1683 case IFN_GOMP_SIMD_LAST_LANE
:
1684 case IFN_GOMP_SIMD_ORDERED_START
:
1685 case IFN_GOMP_SIMD_ORDERED_END
:
1686 DECL_STRUCT_FUNCTION (id
->dst_fn
)->has_simduid_loops
= true;
1693 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1694 RESX and EH_DISPATCH. */
1696 switch (gimple_code (copy
))
1700 tree r
, fndecl
= gimple_call_fndecl (copy
);
1701 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
1702 switch (DECL_FUNCTION_CODE (fndecl
))
1704 case BUILT_IN_EH_COPY_VALUES
:
1705 r
= gimple_call_arg (copy
, 1);
1706 r
= remap_eh_region_tree_nr (r
, id
);
1707 gimple_call_set_arg (copy
, 1, r
);
1710 case BUILT_IN_EH_POINTER
:
1711 case BUILT_IN_EH_FILTER
:
1712 r
= gimple_call_arg (copy
, 0);
1713 r
= remap_eh_region_tree_nr (r
, id
);
1714 gimple_call_set_arg (copy
, 0, r
);
1721 /* Reset alias info if we didn't apply measures to
1722 keep it valid over inlining by setting DECL_PT_UID. */
1723 if (!id
->src_cfun
->gimple_df
1724 || !id
->src_cfun
->gimple_df
->ipa_pta
)
1725 gimple_call_reset_alias_info (as_a
<gcall
*> (copy
));
1731 gresx
*resx_stmt
= as_a
<gresx
*> (copy
);
1732 int r
= gimple_resx_region (resx_stmt
);
1733 r
= remap_eh_region_nr (r
, id
);
1734 gimple_resx_set_region (resx_stmt
, r
);
1738 case GIMPLE_EH_DISPATCH
:
1740 geh_dispatch
*eh_dispatch
= as_a
<geh_dispatch
*> (copy
);
1741 int r
= gimple_eh_dispatch_region (eh_dispatch
);
1742 r
= remap_eh_region_nr (r
, id
);
1743 gimple_eh_dispatch_set_region (eh_dispatch
, r
);
1752 /* If STMT has a block defined, map it to the newly constructed block. */
1753 if (gimple_block (copy
))
1756 n
= id
->decl_map
->get (gimple_block (copy
));
1758 gimple_set_block (copy
, *n
);
1761 if (id
->reset_location
)
1762 gimple_set_location (copy
, input_location
);
1764 /* Debug statements ought to be rebuilt and not copied. */
1765 gcc_checking_assert (!is_gimple_debug (copy
));
1767 /* Remap all the operands in COPY. */
1768 memset (&wi
, 0, sizeof (wi
));
1771 walk_tree (gimple_op_ptr (copy
, 1), remap_gimple_op_r
, &wi
, NULL
);
1773 walk_gimple_op (copy
, remap_gimple_op_r
, &wi
);
1775 /* Clear the copied virtual operands. We are not remapping them here
1776 but are going to recreate them from scratch. */
1777 if (gimple_has_mem_ops (copy
))
1779 gimple_set_vdef (copy
, NULL_TREE
);
1780 gimple_set_vuse (copy
, NULL_TREE
);
1783 gimple_seq_add_stmt (&stmts
, copy
);
1788 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1792 copy_bb (copy_body_data
*id
, basic_block bb
,
1793 profile_count num
, profile_count den
)
1795 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1796 basic_block copy_basic_block
;
1800 profile_count::adjust_for_ipa_scaling (&num
, &den
);
1802 /* Search for previous copied basic block. */
1805 prev
= prev
->prev_bb
;
1807 /* create_basic_block() will append every new block to
1808 basic_block_info automatically. */
1809 copy_basic_block
= create_basic_block (NULL
, (basic_block
) prev
->aux
);
1810 copy_basic_block
->count
= bb
->count
.apply_scale (num
, den
);
1812 copy_gsi
= gsi_start_bb (copy_basic_block
);
1814 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1817 gimple
*stmt
= gsi_stmt (gsi
);
1818 gimple
*orig_stmt
= stmt
;
1819 gimple_stmt_iterator stmts_gsi
;
1820 bool stmt_added
= false;
1822 id
->regimplify
= false;
1823 stmts
= remap_gimple_stmt (stmt
, id
);
1825 if (gimple_seq_empty_p (stmts
))
1830 for (stmts_gsi
= gsi_start (stmts
);
1831 !gsi_end_p (stmts_gsi
); )
1833 stmt
= gsi_stmt (stmts_gsi
);
1835 /* Advance iterator now before stmt is moved to seq_gsi. */
1836 gsi_next (&stmts_gsi
);
1838 if (gimple_nop_p (stmt
))
1841 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
,
1844 /* With return slot optimization we can end up with
1845 non-gimple (foo *)&this->m, fix that here. */
1846 if (is_gimple_assign (stmt
)
1847 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
1848 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1851 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1852 gimple_assign_rhs1 (stmt
),
1854 GSI_CONTINUE_LINKING
);
1855 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1856 id
->regimplify
= false;
1859 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1862 gimple_regimplify_operands (stmt
, &seq_gsi
);
1870 /* If copy_basic_block has been empty at the start of this iteration,
1871 call gsi_start_bb again to get at the newly added statements. */
1872 if (gsi_end_p (copy_gsi
))
1873 copy_gsi
= gsi_start_bb (copy_basic_block
);
1875 gsi_next (©_gsi
);
1877 /* Process the new statement. The call to gimple_regimplify_operands
1878 possibly turned the statement into multiple statements, we
1879 need to process all of them. */
1885 stmt
= gsi_stmt (copy_gsi
);
1886 call_stmt
= dyn_cast
<gcall
*> (stmt
);
1888 && gimple_call_va_arg_pack_p (call_stmt
)
1890 && ! gimple_call_va_arg_pack_p (id
->call_stmt
))
1892 /* __builtin_va_arg_pack () should be replaced by
1893 all arguments corresponding to ... in the caller. */
1897 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1900 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1903 /* Create the new array of arguments. */
1904 n
= nargs
+ gimple_call_num_args (call_stmt
);
1905 argarray
.create (n
);
1906 argarray
.safe_grow_cleared (n
);
1908 /* Copy all the arguments before '...' */
1909 memcpy (argarray
.address (),
1910 gimple_call_arg_ptr (call_stmt
, 0),
1911 gimple_call_num_args (call_stmt
) * sizeof (tree
));
1913 /* Append the arguments passed in '...' */
1914 memcpy (argarray
.address () + gimple_call_num_args (call_stmt
),
1915 gimple_call_arg_ptr (id
->call_stmt
, 0)
1916 + (gimple_call_num_args (id
->call_stmt
) - nargs
),
1917 nargs
* sizeof (tree
));
1919 new_call
= gimple_build_call_vec (gimple_call_fn (call_stmt
),
1922 argarray
.release ();
1924 /* Copy all GIMPLE_CALL flags, location and block, except
1925 GF_CALL_VA_ARG_PACK. */
1926 gimple_call_copy_flags (new_call
, call_stmt
);
1927 gimple_call_set_va_arg_pack (new_call
, false);
1928 gimple_set_location (new_call
, gimple_location (stmt
));
1929 gimple_set_block (new_call
, gimple_block (stmt
));
1930 gimple_call_set_lhs (new_call
, gimple_call_lhs (call_stmt
));
1932 gsi_replace (©_gsi
, new_call
, false);
1937 && (decl
= gimple_call_fndecl (stmt
))
1938 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
1939 && DECL_FUNCTION_CODE (decl
) == BUILT_IN_VA_ARG_PACK_LEN
1940 && ! gimple_call_va_arg_pack_p (id
->call_stmt
))
1942 /* __builtin_va_arg_pack_len () should be replaced by
1943 the number of anonymous arguments. */
1944 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
1948 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
1951 count
= build_int_cst (integer_type_node
, nargs
);
1952 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
), count
);
1953 gsi_replace (©_gsi
, new_stmt
, false);
1958 && gimple_call_internal_p (stmt
)
1959 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
1961 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1962 gsi_remove (©_gsi
, false);
1966 /* Statements produced by inlining can be unfolded, especially
1967 when we constant propagated some operands. We can't fold
1968 them right now for two reasons:
1969 1) folding require SSA_NAME_DEF_STMTs to be correct
1970 2) we can't change function calls to builtins.
1971 So we just mark statement for later folding. We mark
1972 all new statements, instead just statements that has changed
1973 by some nontrivial substitution so even statements made
1974 foldable indirectly are updated. If this turns out to be
1975 expensive, copy_body can be told to watch for nontrivial
1977 if (id
->statements_to_fold
)
1978 id
->statements_to_fold
->add (stmt
);
1980 /* We're duplicating a CALL_EXPR. Find any corresponding
1981 callgraph edges and update or duplicate them. */
1982 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
1984 struct cgraph_edge
*edge
;
1986 switch (id
->transform_call_graph_edges
)
1988 case CB_CGE_DUPLICATE
:
1989 edge
= id
->src_node
->get_edge (orig_stmt
);
1992 struct cgraph_edge
*old_edge
= edge
;
1993 profile_count old_cnt
= edge
->count
;
1994 edge
= edge
->clone (id
->dst_node
, call_stmt
,
1999 /* Speculative calls consist of two edges - direct and
2000 indirect. Duplicate the whole thing and distribute
2001 frequencies accordingly. */
2002 if (edge
->speculative
)
2004 struct cgraph_edge
*direct
, *indirect
;
2005 struct ipa_ref
*ref
;
2007 gcc_assert (!edge
->indirect_unknown_callee
);
2008 old_edge
->speculative_call_info (direct
, indirect
, ref
);
2010 profile_count indir_cnt
= indirect
->count
;
2011 indirect
= indirect
->clone (id
->dst_node
, call_stmt
,
2016 profile_probability prob
2017 = indir_cnt
.probability_in (old_cnt
+ indir_cnt
);
2019 = copy_basic_block
->count
.apply_probability (prob
);
2020 edge
->count
= copy_basic_block
->count
- indirect
->count
;
2021 id
->dst_node
->clone_reference (ref
, stmt
);
2024 edge
->count
= copy_basic_block
->count
;
2028 case CB_CGE_MOVE_CLONES
:
2029 id
->dst_node
->set_call_stmt_including_clones (orig_stmt
,
2031 edge
= id
->dst_node
->get_edge (stmt
);
2035 edge
= id
->dst_node
->get_edge (orig_stmt
);
2037 edge
->set_call_stmt (call_stmt
);
2044 /* Constant propagation on argument done during inlining
2045 may create new direct call. Produce an edge for it. */
2047 || (edge
->indirect_inlining_edge
2048 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
2049 && id
->dst_node
->definition
2050 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
2052 struct cgraph_node
*dest
= cgraph_node::get_create (fn
);
2054 /* We have missing edge in the callgraph. This can happen
2055 when previous inlining turned an indirect call into a
2056 direct call by constant propagating arguments or we are
2057 producing dead clone (for further cloning). In all
2058 other cases we hit a bug (incorrect node sharing is the
2059 most common reason for missing edges). */
2060 gcc_assert (!dest
->definition
2061 || dest
->address_taken
2062 || !id
->src_node
->definition
2063 || !id
->dst_node
->definition
);
2064 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
2065 id
->dst_node
->create_edge_including_clones
2066 (dest
, orig_stmt
, call_stmt
, bb
->count
,
2067 CIF_ORIGINALLY_INDIRECT_CALL
);
2069 id
->dst_node
->create_edge (dest
, call_stmt
,
2070 bb
->count
)->inline_failed
2071 = CIF_ORIGINALLY_INDIRECT_CALL
;
2074 fprintf (dump_file
, "Created new direct edge to %s\n",
2079 notice_special_calls (as_a
<gcall
*> (stmt
));
2082 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
2083 id
->eh_map
, id
->eh_lp_nr
);
2085 gsi_next (©_gsi
);
2087 while (!gsi_end_p (copy_gsi
));
2089 copy_gsi
= gsi_last_bb (copy_basic_block
);
2092 return copy_basic_block
;
2095 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2096 form is quite easy, since dominator relationship for old basic blocks does
2099 There is however exception where inlining might change dominator relation
2100 across EH edges from basic block within inlined functions destinating
2101 to landing pads in function we inline into.
2103 The function fills in PHI_RESULTs of such PHI nodes if they refer
2104 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2105 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2106 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2107 set, and this means that there will be no overlapping live ranges
2108 for the underlying symbol.
2110 This might change in future if we allow redirecting of EH edges and
2111 we might want to change way build CFG pre-inlining to include
2112 all the possible edges then. */
2114 update_ssa_across_abnormal_edges (basic_block bb
, basic_block ret_bb
,
2115 bool can_throw
, bool nonlocal_goto
)
2120 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2122 || ((basic_block
)e
->dest
->aux
)->index
== ENTRY_BLOCK
)
2128 gcc_assert (e
->flags
& EDGE_EH
);
2131 gcc_assert (!(e
->flags
& EDGE_EH
));
2133 for (si
= gsi_start_phis (e
->dest
); !gsi_end_p (si
); gsi_next (&si
))
2139 /* For abnormal goto/call edges the receiver can be the
2140 ENTRY_BLOCK. Do not assert this cannot happen. */
2142 gcc_assert ((e
->flags
& EDGE_EH
)
2143 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)));
2145 re
= find_edge (ret_bb
, e
->dest
);
2146 gcc_checking_assert (re
);
2147 gcc_assert ((re
->flags
& (EDGE_EH
| EDGE_ABNORMAL
))
2148 == (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
)));
2150 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
2151 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, re
)));
2157 /* Copy edges from BB into its copy constructed earlier, scale profile
2158 accordingly. Edges will be taken care of later. Assume aux
2159 pointers to point to the copies of each BB. Return true if any
2160 debug stmts are left after a statement that must end the basic block. */
2163 copy_edges_for_bb (basic_block bb
, profile_count num
, profile_count den
,
2164 basic_block ret_bb
, basic_block abnormal_goto_dest
,
2167 basic_block new_bb
= (basic_block
) bb
->aux
;
2170 gimple_stmt_iterator si
;
2171 bool need_debug_cleanup
= false;
2173 /* Use the indices from the original blocks to create edges for the
2175 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2176 if (!(old_edge
->flags
& EDGE_EH
))
2179 int flags
= old_edge
->flags
;
2180 location_t locus
= old_edge
->goto_locus
;
2182 /* Return edges do get a FALLTHRU flag when they get inlined. */
2183 if (old_edge
->dest
->index
== EXIT_BLOCK
2184 && !(flags
& (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
|EDGE_FAKE
))
2185 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2186 flags
|= EDGE_FALLTHRU
;
2189 = make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
2190 new_edge
->probability
= old_edge
->probability
;
2191 if (!id
->reset_location
)
2192 new_edge
->goto_locus
= remap_location (locus
, id
);
2195 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
2198 /* When doing function splitting, we must decreate count of the return block
2199 which was previously reachable by block we did not copy. */
2200 if (single_succ_p (bb
) && single_succ_edge (bb
)->dest
->index
== EXIT_BLOCK
)
2201 FOR_EACH_EDGE (old_edge
, ei
, bb
->preds
)
2202 if (old_edge
->src
->index
!= ENTRY_BLOCK
2203 && !old_edge
->src
->aux
)
2204 new_bb
->count
-= old_edge
->count ().apply_scale (num
, den
);
2206 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
2209 bool can_throw
, nonlocal_goto
;
2211 copy_stmt
= gsi_stmt (si
);
2212 if (!is_gimple_debug (copy_stmt
))
2213 update_stmt (copy_stmt
);
2215 /* Do this before the possible split_block. */
2218 /* If this tree could throw an exception, there are two
2219 cases where we need to add abnormal edge(s): the
2220 tree wasn't in a region and there is a "current
2221 region" in the caller; or the original tree had
2222 EH edges. In both cases split the block after the tree,
2223 and add abnormal edge(s) as needed; we need both
2224 those from the callee and the caller.
2225 We check whether the copy can throw, because the const
2226 propagation can change an INDIRECT_REF which throws
2227 into a COMPONENT_REF which doesn't. If the copy
2228 can throw, the original could also throw. */
2229 can_throw
= stmt_can_throw_internal (copy_stmt
);
2231 = (stmt_can_make_abnormal_goto (copy_stmt
)
2232 && !computed_goto_p (copy_stmt
));
2234 if (can_throw
|| nonlocal_goto
)
2236 if (!gsi_end_p (si
))
2238 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2241 need_debug_cleanup
= true;
2243 if (!gsi_end_p (si
))
2244 /* Note that bb's predecessor edges aren't necessarily
2245 right at this point; split_block doesn't care. */
2247 edge e
= split_block (new_bb
, copy_stmt
);
2250 new_bb
->aux
= e
->src
->aux
;
2251 si
= gsi_start_bb (new_bb
);
2255 bool update_probs
= false;
2257 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2259 make_eh_dispatch_edges (as_a
<geh_dispatch
*> (copy_stmt
));
2260 update_probs
= true;
2264 make_eh_edges (copy_stmt
);
2265 update_probs
= true;
2268 /* EH edges may not match old edges. Copy as much as possible. */
2273 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2275 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2276 if ((old_edge
->flags
& EDGE_EH
)
2277 && (e
= find_edge (copy_stmt_bb
,
2278 (basic_block
) old_edge
->dest
->aux
))
2279 && (e
->flags
& EDGE_EH
))
2280 e
->probability
= old_edge
->probability
;
2282 FOR_EACH_EDGE (e
, ei
, copy_stmt_bb
->succs
)
2283 if ((e
->flags
& EDGE_EH
) && !e
->probability
.initialized_p ())
2284 e
->probability
= profile_probability::never ();
2288 /* If the call we inline cannot make abnormal goto do not add
2289 additional abnormal edges but only retain those already present
2290 in the original function body. */
2291 if (abnormal_goto_dest
== NULL
)
2292 nonlocal_goto
= false;
2295 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2297 if (get_abnormal_succ_dispatcher (copy_stmt_bb
))
2298 nonlocal_goto
= false;
2299 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2300 in OpenMP regions which aren't allowed to be left abnormally.
2301 So, no need to add abnormal edge in that case. */
2302 else if (is_gimple_call (copy_stmt
)
2303 && gimple_call_internal_p (copy_stmt
)
2304 && (gimple_call_internal_fn (copy_stmt
)
2305 == IFN_ABNORMAL_DISPATCHER
)
2306 && gimple_call_arg (copy_stmt
, 0) == boolean_true_node
)
2307 nonlocal_goto
= false;
2309 make_single_succ_edge (copy_stmt_bb
, abnormal_goto_dest
,
2313 if ((can_throw
|| nonlocal_goto
)
2314 && gimple_in_ssa_p (cfun
))
2315 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2316 can_throw
, nonlocal_goto
);
2318 return need_debug_cleanup
;
2321 /* Copy the PHIs. All blocks and edges are copied, some blocks
2322 was possibly split and new outgoing EH edges inserted.
2323 BB points to the block of original function and AUX pointers links
2324 the original and newly copied blocks. */
2327 copy_phis_for_bb (basic_block bb
, copy_body_data
*id
)
2329 basic_block
const new_bb
= (basic_block
) bb
->aux
;
2334 bool inserted
= false;
2336 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
2342 res
= PHI_RESULT (phi
);
2344 if (!virtual_operand_p (res
))
2346 walk_tree (&new_res
, copy_tree_body_r
, id
, NULL
);
2347 if (EDGE_COUNT (new_bb
->preds
) == 0)
2349 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2350 SSA_NAME_DEF_STMT (new_res
) = gimple_build_nop ();
2354 new_phi
= create_phi_node (new_res
, new_bb
);
2355 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2357 edge old_edge
= find_edge ((basic_block
) new_edge
->src
->aux
,
2364 /* When doing partial cloning, we allow PHIs on the entry
2365 block as long as all the arguments are the same.
2366 Find any input edge to see argument to copy. */
2368 FOR_EACH_EDGE (old_edge
, ei2
, bb
->preds
)
2369 if (!old_edge
->src
->aux
)
2372 arg
= PHI_ARG_DEF_FROM_EDGE (phi
, old_edge
);
2374 walk_tree (&new_arg
, copy_tree_body_r
, id
, NULL
);
2375 gcc_assert (new_arg
);
2376 /* With return slot optimization we can end up with
2377 non-gimple (foo *)&this->m, fix that here. */
2378 if (TREE_CODE (new_arg
) != SSA_NAME
2379 && TREE_CODE (new_arg
) != FUNCTION_DECL
2380 && !is_gimple_val (new_arg
))
2382 gimple_seq stmts
= NULL
;
2383 new_arg
= force_gimple_operand (new_arg
, &stmts
, true,
2385 gsi_insert_seq_on_edge (new_edge
, stmts
);
2388 locus
= gimple_phi_arg_location_from_edge (phi
, old_edge
);
2389 if (id
->reset_location
)
2390 locus
= input_location
;
2392 locus
= remap_location (locus
, id
);
2393 add_phi_arg (new_phi
, new_arg
, new_edge
, locus
);
2399 /* Commit the delayed edge insertions. */
2401 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2402 gsi_commit_one_edge_insert (new_edge
, NULL
);
2406 /* Wrapper for remap_decl so it can be used as a callback. */
2409 remap_decl_1 (tree decl
, void *data
)
2411 return remap_decl (decl
, (copy_body_data
*) data
);
2414 /* Build struct function and associated datastructures for the new clone
2415 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2416 the cfun to the function of new_fndecl (and current_function_decl too). */
2419 initialize_cfun (tree new_fndecl
, tree callee_fndecl
, profile_count count
)
2421 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2423 if (!DECL_ARGUMENTS (new_fndecl
))
2424 DECL_ARGUMENTS (new_fndecl
) = DECL_ARGUMENTS (callee_fndecl
);
2425 if (!DECL_RESULT (new_fndecl
))
2426 DECL_RESULT (new_fndecl
) = DECL_RESULT (callee_fndecl
);
2428 /* Register specific tree functions. */
2429 gimple_register_cfg_hooks ();
2431 /* Get clean struct function. */
2432 push_struct_function (new_fndecl
);
2434 /* We will rebuild these, so just sanity check that they are empty. */
2435 gcc_assert (VALUE_HISTOGRAMS (cfun
) == NULL
);
2436 gcc_assert (cfun
->local_decls
== NULL
);
2437 gcc_assert (cfun
->cfg
== NULL
);
2438 gcc_assert (cfun
->decl
== new_fndecl
);
2440 /* Copy items we preserve during cloning. */
2441 cfun
->static_chain_decl
= src_cfun
->static_chain_decl
;
2442 cfun
->nonlocal_goto_save_area
= src_cfun
->nonlocal_goto_save_area
;
2443 cfun
->function_end_locus
= src_cfun
->function_end_locus
;
2444 cfun
->curr_properties
= src_cfun
->curr_properties
;
2445 cfun
->last_verified
= src_cfun
->last_verified
;
2446 cfun
->va_list_gpr_size
= src_cfun
->va_list_gpr_size
;
2447 cfun
->va_list_fpr_size
= src_cfun
->va_list_fpr_size
;
2448 cfun
->has_nonlocal_label
= src_cfun
->has_nonlocal_label
;
2449 cfun
->stdarg
= src_cfun
->stdarg
;
2450 cfun
->after_inlining
= src_cfun
->after_inlining
;
2451 cfun
->can_throw_non_call_exceptions
2452 = src_cfun
->can_throw_non_call_exceptions
;
2453 cfun
->can_delete_dead_exceptions
= src_cfun
->can_delete_dead_exceptions
;
2454 cfun
->returns_struct
= src_cfun
->returns_struct
;
2455 cfun
->returns_pcc_struct
= src_cfun
->returns_pcc_struct
;
2457 init_empty_tree_cfg ();
2459 profile_status_for_fn (cfun
) = profile_status_for_fn (src_cfun
);
2461 profile_count num
= count
;
2462 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2463 profile_count::adjust_for_ipa_scaling (&num
, &den
);
2465 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
2466 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2467 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2468 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
2469 EXIT_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2470 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2472 init_eh_for_function ();
2474 if (src_cfun
->gimple_df
)
2476 init_tree_ssa (cfun
);
2477 cfun
->gimple_df
->in_ssa_p
= src_cfun
->gimple_df
->in_ssa_p
;
2478 if (cfun
->gimple_df
->in_ssa_p
)
2479 init_ssa_operands (cfun
);
2483 /* Helper function for copy_cfg_body. Move debug stmts from the end
2484 of NEW_BB to the beginning of successor basic blocks when needed. If the
2485 successor has multiple predecessors, reset them, otherwise keep
2489 maybe_move_debug_stmts_to_successors (copy_body_data
*id
, basic_block new_bb
)
2493 gimple_stmt_iterator si
= gsi_last_nondebug_bb (new_bb
);
2496 || gsi_one_before_end_p (si
)
2497 || !(stmt_can_throw_internal (gsi_stmt (si
))
2498 || stmt_can_make_abnormal_goto (gsi_stmt (si
))))
2501 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
2503 gimple_stmt_iterator ssi
= gsi_last_bb (new_bb
);
2504 gimple_stmt_iterator dsi
= gsi_after_labels (e
->dest
);
2505 while (is_gimple_debug (gsi_stmt (ssi
)))
2507 gimple
*stmt
= gsi_stmt (ssi
);
2512 /* For the last edge move the debug stmts instead of copying
2514 if (ei_one_before_end_p (ei
))
2518 if (!single_pred_p (e
->dest
) && gimple_debug_bind_p (stmt
))
2520 gimple_debug_bind_reset_value (stmt
);
2521 gimple_set_location (stmt
, UNKNOWN_LOCATION
);
2523 gsi_remove (&si
, false);
2524 gsi_insert_before (&dsi
, stmt
, GSI_SAME_STMT
);
2528 if (gimple_debug_bind_p (stmt
))
2530 var
= gimple_debug_bind_get_var (stmt
);
2531 if (single_pred_p (e
->dest
))
2533 value
= gimple_debug_bind_get_value (stmt
);
2534 value
= unshare_expr (value
);
2535 new_stmt
= gimple_build_debug_bind (var
, value
, stmt
);
2538 new_stmt
= gimple_build_debug_bind (var
, NULL_TREE
, NULL
);
2540 else if (gimple_debug_source_bind_p (stmt
))
2542 var
= gimple_debug_source_bind_get_var (stmt
);
2543 value
= gimple_debug_source_bind_get_value (stmt
);
2544 new_stmt
= gimple_build_debug_source_bind (var
, value
, stmt
);
2546 else if (gimple_debug_nonbind_marker_p (stmt
))
2547 new_stmt
= as_a
<gdebug
*> (gimple_copy (stmt
));
2550 gsi_insert_before (&dsi
, new_stmt
, GSI_SAME_STMT
);
2551 id
->debug_stmts
.safe_push (new_stmt
);
2557 /* Make a copy of the sub-loops of SRC_PARENT and place them
2558 as siblings of DEST_PARENT. */
2561 copy_loops (copy_body_data
*id
,
2562 struct loop
*dest_parent
, struct loop
*src_parent
)
2564 struct loop
*src_loop
= src_parent
->inner
;
2567 if (!id
->blocks_to_copy
2568 || bitmap_bit_p (id
->blocks_to_copy
, src_loop
->header
->index
))
2570 struct loop
*dest_loop
= alloc_loop ();
2572 /* Assign the new loop its header and latch and associate
2573 those with the new loop. */
2574 dest_loop
->header
= (basic_block
)src_loop
->header
->aux
;
2575 dest_loop
->header
->loop_father
= dest_loop
;
2576 if (src_loop
->latch
!= NULL
)
2578 dest_loop
->latch
= (basic_block
)src_loop
->latch
->aux
;
2579 dest_loop
->latch
->loop_father
= dest_loop
;
2582 /* Copy loop meta-data. */
2583 copy_loop_info (src_loop
, dest_loop
);
2585 /* Finally place it into the loop array and the loop tree. */
2586 place_new_loop (cfun
, dest_loop
);
2587 flow_loop_tree_node_add (dest_parent
, dest_loop
);
2589 dest_loop
->safelen
= src_loop
->safelen
;
2590 if (src_loop
->unroll
)
2592 dest_loop
->unroll
= src_loop
->unroll
;
2593 cfun
->has_unroll
= true;
2595 dest_loop
->dont_vectorize
= src_loop
->dont_vectorize
;
2596 if (src_loop
->force_vectorize
)
2598 dest_loop
->force_vectorize
= true;
2599 cfun
->has_force_vectorize_loops
= true;
2601 if (src_loop
->simduid
)
2603 dest_loop
->simduid
= remap_decl (src_loop
->simduid
, id
);
2604 cfun
->has_simduid_loops
= true;
2608 copy_loops (id
, dest_loop
, src_loop
);
2610 src_loop
= src_loop
->next
;
2614 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2617 redirect_all_calls (copy_body_data
* id
, basic_block bb
)
2619 gimple_stmt_iterator si
;
2620 gimple
*last
= last_stmt (bb
);
2621 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
2623 gimple
*stmt
= gsi_stmt (si
);
2624 if (is_gimple_call (stmt
))
2626 struct cgraph_edge
*edge
= id
->dst_node
->get_edge (stmt
);
2629 edge
->redirect_call_stmt_to_callee ();
2630 if (stmt
== last
&& id
->call_stmt
&& maybe_clean_eh_stmt (stmt
))
2631 gimple_purge_dead_eh_edges (bb
);
2637 /* Make a copy of the body of FN so that it can be inserted inline in
2638 another function. Walks FN via CFG, returns new fndecl. */
2641 copy_cfg_body (copy_body_data
* id
,
2642 basic_block entry_block_map
, basic_block exit_block_map
,
2643 basic_block new_entry
)
2645 tree callee_fndecl
= id
->src_fn
;
2646 /* Original cfun for the callee, doesn't change. */
2647 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2648 struct function
*cfun_to_copy
;
2650 tree new_fndecl
= NULL
;
2651 bool need_debug_cleanup
= false;
2653 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2654 profile_count num
= entry_block_map
->count
;
2656 cfun_to_copy
= id
->src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2658 /* Register specific tree functions. */
2659 gimple_register_cfg_hooks ();
2661 /* If we are inlining just region of the function, make sure to connect
2662 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2663 part of loop, we must compute frequency and probability of
2664 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2665 probabilities of edges incoming from nonduplicated region. */
2670 den
= profile_count::zero ();
2672 FOR_EACH_EDGE (e
, ei
, new_entry
->preds
)
2675 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= den
;
2678 profile_count::adjust_for_ipa_scaling (&num
, &den
);
2680 /* Must have a CFG here at this point. */
2681 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2682 (DECL_STRUCT_FUNCTION (callee_fndecl
)));
2685 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= entry_block_map
;
2686 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= exit_block_map
;
2687 entry_block_map
->aux
= ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2688 exit_block_map
->aux
= EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2690 /* Duplicate any exception-handling regions. */
2692 id
->eh_map
= duplicate_eh_regions (cfun_to_copy
, NULL
, id
->eh_lp_nr
,
2695 /* Use aux pointers to map the original blocks to copy. */
2696 FOR_EACH_BB_FN (bb
, cfun_to_copy
)
2697 if (!id
->blocks_to_copy
|| bitmap_bit_p (id
->blocks_to_copy
, bb
->index
))
2699 basic_block new_bb
= copy_bb (id
, bb
, num
, den
);
2702 new_bb
->loop_father
= entry_block_map
->loop_father
;
2705 last
= last_basic_block_for_fn (cfun
);
2707 /* Now that we've duplicated the blocks, duplicate their edges. */
2708 basic_block abnormal_goto_dest
= NULL
;
2710 && stmt_can_make_abnormal_goto (id
->call_stmt
))
2712 gimple_stmt_iterator gsi
= gsi_for_stmt (id
->call_stmt
);
2714 bb
= gimple_bb (id
->call_stmt
);
2716 if (gsi_end_p (gsi
))
2717 abnormal_goto_dest
= get_abnormal_succ_dispatcher (bb
);
2719 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2720 if (!id
->blocks_to_copy
2721 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2722 need_debug_cleanup
|= copy_edges_for_bb (bb
, num
, den
, exit_block_map
,
2723 abnormal_goto_dest
, id
);
2727 edge e
= make_edge (entry_block_map
, (basic_block
)new_entry
->aux
,
2729 e
->probability
= profile_probability::always ();
2732 /* Duplicate the loop tree, if available and wanted. */
2733 if (loops_for_fn (src_cfun
) != NULL
2734 && current_loops
!= NULL
)
2736 copy_loops (id
, entry_block_map
->loop_father
,
2737 get_loop (src_cfun
, 0));
2738 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2739 loops_state_set (LOOPS_NEED_FIXUP
);
2742 /* If the loop tree in the source function needed fixup, mark the
2743 destination loop tree for fixup, too. */
2744 if (loops_for_fn (src_cfun
)->state
& LOOPS_NEED_FIXUP
)
2745 loops_state_set (LOOPS_NEED_FIXUP
);
2747 if (gimple_in_ssa_p (cfun
))
2748 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2749 if (!id
->blocks_to_copy
2750 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2751 copy_phis_for_bb (bb
, id
);
2753 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2756 if (need_debug_cleanup
2757 && bb
->index
!= ENTRY_BLOCK
2758 && bb
->index
!= EXIT_BLOCK
)
2759 maybe_move_debug_stmts_to_successors (id
, (basic_block
) bb
->aux
);
2760 /* Update call edge destinations. This can not be done before loop
2761 info is updated, because we may split basic blocks. */
2762 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
2763 && bb
->index
!= ENTRY_BLOCK
2764 && bb
->index
!= EXIT_BLOCK
)
2765 redirect_all_calls (id
, (basic_block
)bb
->aux
);
2766 ((basic_block
)bb
->aux
)->aux
= NULL
;
2770 /* Zero out AUX fields of newly created block during EH edge
2772 for (; last
< last_basic_block_for_fn (cfun
); last
++)
2774 if (need_debug_cleanup
)
2775 maybe_move_debug_stmts_to_successors (id
,
2776 BASIC_BLOCK_FOR_FN (cfun
, last
));
2777 BASIC_BLOCK_FOR_FN (cfun
, last
)->aux
= NULL
;
2778 /* Update call edge destinations. This can not be done before loop
2779 info is updated, because we may split basic blocks. */
2780 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
)
2781 redirect_all_calls (id
, BASIC_BLOCK_FOR_FN (cfun
, last
));
2783 entry_block_map
->aux
= NULL
;
2784 exit_block_map
->aux
= NULL
;
2791 if (id
->dependence_map
)
2793 delete id
->dependence_map
;
2794 id
->dependence_map
= NULL
;
2800 /* Copy the debug STMT using ID. We deal with these statements in a
2801 special way: if any variable in their VALUE expression wasn't
2802 remapped yet, we won't remap it, because that would get decl uids
2803 out of sync, causing codegen differences between -g and -g0. If
2804 this arises, we drop the VALUE expression altogether. */
2807 copy_debug_stmt (gdebug
*stmt
, copy_body_data
*id
)
2810 struct walk_stmt_info wi
;
2812 if (gimple_block (stmt
))
2814 n
= id
->decl_map
->get (gimple_block (stmt
));
2815 gimple_set_block (stmt
, n
? *n
: id
->block
);
2818 if (gimple_debug_nonbind_marker_p (stmt
))
2821 /* Remap all the operands in COPY. */
2822 memset (&wi
, 0, sizeof (wi
));
2825 processing_debug_stmt
= 1;
2827 if (gimple_debug_source_bind_p (stmt
))
2828 t
= gimple_debug_source_bind_get_var (stmt
);
2829 else if (gimple_debug_bind_p (stmt
))
2830 t
= gimple_debug_bind_get_var (stmt
);
2834 if (TREE_CODE (t
) == PARM_DECL
&& id
->debug_map
2835 && (n
= id
->debug_map
->get (t
)))
2837 gcc_assert (VAR_P (*n
));
2840 else if (VAR_P (t
) && !is_global_var (t
) && !id
->decl_map
->get (t
))
2841 /* T is a non-localized variable. */;
2843 walk_tree (&t
, remap_gimple_op_r
, &wi
, NULL
);
2845 if (gimple_debug_bind_p (stmt
))
2847 gimple_debug_bind_set_var (stmt
, t
);
2849 if (gimple_debug_bind_has_value_p (stmt
))
2850 walk_tree (gimple_debug_bind_get_value_ptr (stmt
),
2851 remap_gimple_op_r
, &wi
, NULL
);
2853 /* Punt if any decl couldn't be remapped. */
2854 if (processing_debug_stmt
< 0)
2855 gimple_debug_bind_reset_value (stmt
);
2857 else if (gimple_debug_source_bind_p (stmt
))
2859 gimple_debug_source_bind_set_var (stmt
, t
);
2860 /* When inlining and source bind refers to one of the optimized
2861 away parameters, change the source bind into normal debug bind
2862 referring to the corresponding DEBUG_EXPR_DECL that should have
2863 been bound before the call stmt. */
2864 t
= gimple_debug_source_bind_get_value (stmt
);
2866 && TREE_CODE (t
) == PARM_DECL
2869 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (id
->src_fn
);
2871 if (debug_args
!= NULL
)
2873 for (i
= 0; i
< vec_safe_length (*debug_args
); i
+= 2)
2874 if ((**debug_args
)[i
] == DECL_ORIGIN (t
)
2875 && TREE_CODE ((**debug_args
)[i
+ 1]) == DEBUG_EXPR_DECL
)
2877 t
= (**debug_args
)[i
+ 1];
2878 stmt
->subcode
= GIMPLE_DEBUG_BIND
;
2879 gimple_debug_bind_set_value (stmt
, t
);
2884 if (gimple_debug_source_bind_p (stmt
))
2885 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt
),
2886 remap_gimple_op_r
, &wi
, NULL
);
2889 processing_debug_stmt
= 0;
2894 /* Process deferred debug stmts. In order to give values better odds
2895 of being successfully remapped, we delay the processing of debug
2896 stmts until all other stmts that might require remapping are
2900 copy_debug_stmts (copy_body_data
*id
)
2905 if (!id
->debug_stmts
.exists ())
2908 FOR_EACH_VEC_ELT (id
->debug_stmts
, i
, stmt
)
2909 copy_debug_stmt (stmt
, id
);
2911 id
->debug_stmts
.release ();
2914 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2915 another function. */
2918 copy_tree_body (copy_body_data
*id
)
2920 tree fndecl
= id
->src_fn
;
2921 tree body
= DECL_SAVED_TREE (fndecl
);
2923 walk_tree (&body
, copy_tree_body_r
, id
, NULL
);
2928 /* Make a copy of the body of FN so that it can be inserted inline in
2929 another function. */
2932 copy_body (copy_body_data
*id
,
2933 basic_block entry_block_map
, basic_block exit_block_map
,
2934 basic_block new_entry
)
2936 tree fndecl
= id
->src_fn
;
2939 /* If this body has a CFG, walk CFG and copy. */
2940 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl
)));
2941 body
= copy_cfg_body (id
, entry_block_map
, exit_block_map
,
2943 copy_debug_stmts (id
);
2948 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2949 defined in function FN, or of a data member thereof. */
2952 self_inlining_addr_expr (tree value
, tree fn
)
2956 if (TREE_CODE (value
) != ADDR_EXPR
)
2959 var
= get_base_address (TREE_OPERAND (value
, 0));
2961 return var
&& auto_var_in_fn_p (var
, fn
);
2964 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2965 lexical block and line number information from base_stmt, if given,
2966 or from the last stmt of the block otherwise. */
2969 insert_init_debug_bind (copy_body_data
*id
,
2970 basic_block bb
, tree var
, tree value
,
2974 gimple_stmt_iterator gsi
;
2977 if (!gimple_in_ssa_p (id
->src_cfun
))
2980 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
2983 tracked_var
= target_for_debug_bind (var
);
2989 gsi
= gsi_last_bb (bb
);
2990 if (!base_stmt
&& !gsi_end_p (gsi
))
2991 base_stmt
= gsi_stmt (gsi
);
2994 note
= gimple_build_debug_bind (tracked_var
, unshare_expr (value
), base_stmt
);
2998 if (!gsi_end_p (gsi
))
2999 gsi_insert_after (&gsi
, note
, GSI_SAME_STMT
);
3001 gsi_insert_before (&gsi
, note
, GSI_SAME_STMT
);
3008 insert_init_stmt (copy_body_data
*id
, basic_block bb
, gimple
*init_stmt
)
3010 /* If VAR represents a zero-sized variable, it's possible that the
3011 assignment statement may result in no gimple statements. */
3014 gimple_stmt_iterator si
= gsi_last_bb (bb
);
3016 /* We can end up with init statements that store to a non-register
3017 from a rhs with a conversion. Handle that here by forcing the
3018 rhs into a temporary. gimple_regimplify_operands is not
3019 prepared to do this for us. */
3020 if (!is_gimple_debug (init_stmt
)
3021 && !is_gimple_reg (gimple_assign_lhs (init_stmt
))
3022 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt
)))
3023 && gimple_assign_rhs_class (init_stmt
) == GIMPLE_UNARY_RHS
)
3025 tree rhs
= build1 (gimple_assign_rhs_code (init_stmt
),
3026 gimple_expr_type (init_stmt
),
3027 gimple_assign_rhs1 (init_stmt
));
3028 rhs
= force_gimple_operand_gsi (&si
, rhs
, true, NULL_TREE
, false,
3030 gimple_assign_set_rhs_code (init_stmt
, TREE_CODE (rhs
));
3031 gimple_assign_set_rhs1 (init_stmt
, rhs
);
3033 gsi_insert_after (&si
, init_stmt
, GSI_NEW_STMT
);
3034 gimple_regimplify_operands (init_stmt
, &si
);
3036 if (!is_gimple_debug (init_stmt
))
3038 tree def
= gimple_assign_lhs (init_stmt
);
3039 insert_init_debug_bind (id
, bb
, def
, def
, init_stmt
);
3044 /* Initialize parameter P with VALUE. If needed, produce init statement
3045 at the end of BB. When BB is NULL, we return init statement to be
3048 setup_one_parameter (copy_body_data
*id
, tree p
, tree value
, tree fn
,
3049 basic_block bb
, tree
*vars
)
3051 gimple
*init_stmt
= NULL
;
3054 tree def
= (gimple_in_ssa_p (cfun
)
3055 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3058 && value
!= error_mark_node
3059 && !useless_type_conversion_p (TREE_TYPE (p
), TREE_TYPE (value
)))
3061 /* If we can match up types by promotion/demotion do so. */
3062 if (fold_convertible_p (TREE_TYPE (p
), value
))
3063 rhs
= fold_convert (TREE_TYPE (p
), value
);
3066 /* ??? For valid programs we should not end up here.
3067 Still if we end up with truly mismatched types here, fall back
3068 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3069 GIMPLE to the following passes. */
3070 if (!is_gimple_reg_type (TREE_TYPE (value
))
3071 || TYPE_SIZE (TREE_TYPE (p
)) == TYPE_SIZE (TREE_TYPE (value
)))
3072 rhs
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (p
), value
);
3074 rhs
= build_zero_cst (TREE_TYPE (p
));
3078 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3079 here since the type of this decl must be visible to the calling
3081 var
= copy_decl_to_var (p
, id
);
3083 /* Declare this new variable. */
3084 DECL_CHAIN (var
) = *vars
;
3087 /* Make gimplifier happy about this variable. */
3088 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3090 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3091 we would not need to create a new variable here at all, if it
3092 weren't for debug info. Still, we can just use the argument
3094 if (TREE_READONLY (p
)
3095 && !TREE_ADDRESSABLE (p
)
3096 && value
&& !TREE_SIDE_EFFECTS (value
)
3099 /* We may produce non-gimple trees by adding NOPs or introduce
3100 invalid sharing when operand is not really constant.
3101 It is not big deal to prohibit constant propagation here as
3102 we will constant propagate in DOM1 pass anyway. */
3103 if (is_gimple_min_invariant (value
)
3104 && useless_type_conversion_p (TREE_TYPE (p
),
3106 /* We have to be very careful about ADDR_EXPR. Make sure
3107 the base variable isn't a local variable of the inlined
3108 function, e.g., when doing recursive inlining, direct or
3109 mutually-recursive or whatever, which is why we don't
3110 just test whether fn == current_function_decl. */
3111 && ! self_inlining_addr_expr (value
, fn
))
3113 insert_decl_map (id
, p
, value
);
3114 insert_debug_decl_map (id
, p
, var
);
3115 return insert_init_debug_bind (id
, bb
, var
, value
, NULL
);
3119 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3120 that way, when the PARM_DECL is encountered, it will be
3121 automatically replaced by the VAR_DECL. */
3122 insert_decl_map (id
, p
, var
);
3124 /* Even if P was TREE_READONLY, the new VAR should not be.
3125 In the original code, we would have constructed a
3126 temporary, and then the function body would have never
3127 changed the value of P. However, now, we will be
3128 constructing VAR directly. The constructor body may
3129 change its value multiple times as it is being
3130 constructed. Therefore, it must not be TREE_READONLY;
3131 the back-end assumes that TREE_READONLY variable is
3132 assigned to only once. */
3133 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p
)))
3134 TREE_READONLY (var
) = 0;
3136 /* If there is no setup required and we are in SSA, take the easy route
3137 replacing all SSA names representing the function parameter by the
3138 SSA name passed to function.
3140 We need to construct map for the variable anyway as it might be used
3141 in different SSA names when parameter is set in function.
3143 Do replacement at -O0 for const arguments replaced by constant.
3144 This is important for builtin_constant_p and other construct requiring
3145 constant argument to be visible in inlined function body. */
3146 if (gimple_in_ssa_p (cfun
) && rhs
&& def
&& is_gimple_reg (p
)
3148 || (TREE_READONLY (p
)
3149 && is_gimple_min_invariant (rhs
)))
3150 && (TREE_CODE (rhs
) == SSA_NAME
3151 || is_gimple_min_invariant (rhs
))
3152 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
3154 insert_decl_map (id
, def
, rhs
);
3155 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3158 /* If the value of argument is never used, don't care about initializing
3160 if (optimize
&& gimple_in_ssa_p (cfun
) && !def
&& is_gimple_reg (p
))
3162 gcc_assert (!value
|| !TREE_SIDE_EFFECTS (value
));
3163 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3166 /* Initialize this VAR_DECL from the equivalent argument. Convert
3167 the argument to the proper type in case it was promoted. */
3170 if (rhs
== error_mark_node
)
3172 insert_decl_map (id
, p
, var
);
3173 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3176 STRIP_USELESS_TYPE_CONVERSION (rhs
);
3178 /* If we are in SSA form properly remap the default definition
3179 or assign to a dummy SSA name if the parameter is unused and
3180 we are not optimizing. */
3181 if (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
))
3185 def
= remap_ssa_name (def
, id
);
3186 init_stmt
= gimple_build_assign (def
, rhs
);
3187 SSA_NAME_IS_DEFAULT_DEF (def
) = 0;
3188 set_ssa_default_def (cfun
, var
, NULL
);
3192 def
= make_ssa_name (var
);
3193 init_stmt
= gimple_build_assign (def
, rhs
);
3197 init_stmt
= gimple_build_assign (var
, rhs
);
3199 if (bb
&& init_stmt
)
3200 insert_init_stmt (id
, bb
, init_stmt
);
3205 /* Generate code to initialize the parameters of the function at the
3206 top of the stack in ID from the GIMPLE_CALL STMT. */
3209 initialize_inlined_parameters (copy_body_data
*id
, gimple
*stmt
,
3210 tree fn
, basic_block bb
)
3215 tree vars
= NULL_TREE
;
3216 tree static_chain
= gimple_call_chain (stmt
);
3218 /* Figure out what the parameters are. */
3219 parms
= DECL_ARGUMENTS (fn
);
3221 /* Loop through the parameter declarations, replacing each with an
3222 equivalent VAR_DECL, appropriately initialized. */
3223 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3226 val
= i
< gimple_call_num_args (stmt
) ? gimple_call_arg (stmt
, i
) : NULL
;
3227 setup_one_parameter (id
, p
, val
, fn
, bb
, &vars
);
3229 /* After remapping parameters remap their types. This has to be done
3230 in a second loop over all parameters to appropriately remap
3231 variable sized arrays when the size is specified in a
3232 parameter following the array. */
3233 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3235 tree
*varp
= id
->decl_map
->get (p
);
3236 if (varp
&& VAR_P (*varp
))
3238 tree def
= (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
)
3239 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3241 TREE_TYPE (var
) = remap_type (TREE_TYPE (var
), id
);
3242 /* Also remap the default definition if it was remapped
3243 to the default definition of the parameter replacement
3244 by the parameter setup. */
3247 tree
*defp
= id
->decl_map
->get (def
);
3249 && TREE_CODE (*defp
) == SSA_NAME
3250 && SSA_NAME_VAR (*defp
) == var
)
3251 TREE_TYPE (*defp
) = TREE_TYPE (var
);
3256 /* Initialize the static chain. */
3257 p
= DECL_STRUCT_FUNCTION (fn
)->static_chain_decl
;
3258 gcc_assert (fn
!= current_function_decl
);
3261 /* No static chain? Seems like a bug in tree-nested.c. */
3262 gcc_assert (static_chain
);
3264 setup_one_parameter (id
, p
, static_chain
, fn
, bb
, &vars
);
3267 declare_inline_vars (id
->block
, vars
);
3271 /* Declare a return variable to replace the RESULT_DECL for the
3272 function we are calling. An appropriate DECL_STMT is returned.
3273 The USE_STMT is filled to contain a use of the declaration to
3274 indicate the return value of the function.
3276 RETURN_SLOT, if non-null is place where to store the result. It
3277 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3278 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3280 The return value is a (possibly null) value that holds the result
3281 as seen by the caller. */
3284 declare_return_variable (copy_body_data
*id
, tree return_slot
, tree modify_dest
,
3285 basic_block entry_bb
)
3287 tree callee
= id
->src_fn
;
3288 tree result
= DECL_RESULT (callee
);
3289 tree callee_type
= TREE_TYPE (result
);
3293 /* Handle type-mismatches in the function declaration return type
3294 vs. the call expression. */
3296 caller_type
= TREE_TYPE (modify_dest
);
3298 caller_type
= TREE_TYPE (TREE_TYPE (callee
));
3300 /* We don't need to do anything for functions that don't return anything. */
3301 if (VOID_TYPE_P (callee_type
))
3304 /* If there was a return slot, then the return value is the
3305 dereferenced address of that object. */
3308 /* The front end shouldn't have used both return_slot and
3309 a modify expression. */
3310 gcc_assert (!modify_dest
);
3311 if (DECL_BY_REFERENCE (result
))
3313 tree return_slot_addr
= build_fold_addr_expr (return_slot
);
3314 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr
);
3316 /* We are going to construct *&return_slot and we can't do that
3317 for variables believed to be not addressable.
3319 FIXME: This check possibly can match, because values returned
3320 via return slot optimization are not believed to have address
3321 taken by alias analysis. */
3322 gcc_assert (TREE_CODE (return_slot
) != SSA_NAME
);
3323 var
= return_slot_addr
;
3328 gcc_assert (TREE_CODE (var
) != SSA_NAME
);
3329 if (TREE_ADDRESSABLE (result
))
3330 mark_addressable (var
);
3332 if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3333 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3334 && !DECL_GIMPLE_REG_P (result
)
3336 DECL_GIMPLE_REG_P (var
) = 0;
3341 /* All types requiring non-trivial constructors should have been handled. */
3342 gcc_assert (!TREE_ADDRESSABLE (callee_type
));
3344 /* Attempt to avoid creating a new temporary variable. */
3346 && TREE_CODE (modify_dest
) != SSA_NAME
)
3348 bool use_it
= false;
3350 /* We can't use MODIFY_DEST if there's type promotion involved. */
3351 if (!useless_type_conversion_p (callee_type
, caller_type
))
3354 /* ??? If we're assigning to a variable sized type, then we must
3355 reuse the destination variable, because we've no good way to
3356 create variable sized temporaries at this point. */
3357 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type
)) != INTEGER_CST
)
3360 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3361 reuse it as the result of the call directly. Don't do this if
3362 it would promote MODIFY_DEST to addressable. */
3363 else if (TREE_ADDRESSABLE (result
))
3367 tree base_m
= get_base_address (modify_dest
);
3369 /* If the base isn't a decl, then it's a pointer, and we don't
3370 know where that's going to go. */
3371 if (!DECL_P (base_m
))
3373 else if (is_global_var (base_m
))
3375 else if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3376 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3377 && !DECL_GIMPLE_REG_P (result
)
3378 && DECL_GIMPLE_REG_P (base_m
))
3380 else if (!TREE_ADDRESSABLE (base_m
))
3392 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type
)) == INTEGER_CST
);
3394 var
= copy_result_decl_to_var (result
, id
);
3395 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3397 /* Do not have the rest of GCC warn about this variable as it should
3398 not be visible to the user. */
3399 TREE_NO_WARNING (var
) = 1;
3401 declare_inline_vars (id
->block
, var
);
3403 /* Build the use expr. If the return type of the function was
3404 promoted, convert it back to the expected type. */
3406 if (!useless_type_conversion_p (caller_type
, TREE_TYPE (var
)))
3408 /* If we can match up types by promotion/demotion do so. */
3409 if (fold_convertible_p (caller_type
, var
))
3410 use
= fold_convert (caller_type
, var
);
3413 /* ??? For valid programs we should not end up here.
3414 Still if we end up with truly mismatched types here, fall back
3415 to using a MEM_REF to not leak invalid GIMPLE to the following
3417 /* Prevent var from being written into SSA form. */
3418 if (TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
3419 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
)
3420 DECL_GIMPLE_REG_P (var
) = false;
3421 else if (is_gimple_reg_type (TREE_TYPE (var
)))
3422 TREE_ADDRESSABLE (var
) = true;
3423 use
= fold_build2 (MEM_REF
, caller_type
,
3424 build_fold_addr_expr (var
),
3425 build_int_cst (ptr_type_node
, 0));
3429 STRIP_USELESS_TYPE_CONVERSION (use
);
3431 if (DECL_BY_REFERENCE (result
))
3433 TREE_ADDRESSABLE (var
) = 1;
3434 var
= build_fold_addr_expr (var
);
3438 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3439 way, when the RESULT_DECL is encountered, it will be
3440 automatically replaced by the VAR_DECL.
3442 When returning by reference, ensure that RESULT_DECL remaps to
3444 if (DECL_BY_REFERENCE (result
)
3445 && !is_gimple_val (var
))
3447 tree temp
= create_tmp_var (TREE_TYPE (result
), "retvalptr");
3448 insert_decl_map (id
, result
, temp
);
3449 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3450 it's default_def SSA_NAME. */
3451 if (gimple_in_ssa_p (id
->src_cfun
)
3452 && is_gimple_reg (result
))
3454 temp
= make_ssa_name (temp
);
3455 insert_decl_map (id
, ssa_default_def (id
->src_cfun
, result
), temp
);
3457 insert_init_stmt (id
, entry_bb
, gimple_build_assign (temp
, var
));
3460 insert_decl_map (id
, result
, var
);
3462 /* Remember this so we can ignore it in remap_decls. */
3467 /* Determine if the function can be copied. If so return NULL. If
3468 not return a string describng the reason for failure. */
3471 copy_forbidden (struct function
*fun
)
3473 const char *reason
= fun
->cannot_be_copied_reason
;
3475 /* Only examine the function once. */
3476 if (fun
->cannot_be_copied_set
)
3479 /* We cannot copy a function that receives a non-local goto
3480 because we cannot remap the destination label used in the
3481 function that is performing the non-local goto. */
3482 /* ??? Actually, this should be possible, if we work at it.
3483 No doubt there's just a handful of places that simply
3484 assume it doesn't happen and don't substitute properly. */
3485 if (fun
->has_nonlocal_label
)
3487 reason
= G_("function %q+F can never be copied "
3488 "because it receives a non-local goto");
3492 if (fun
->has_forced_label_in_static
)
3494 reason
= G_("function %q+F can never be copied because it saves "
3495 "address of local label in a static variable");
3500 fun
->cannot_be_copied_reason
= reason
;
3501 fun
->cannot_be_copied_set
= true;
3506 static const char *inline_forbidden_reason
;
3508 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3509 iff a function can not be inlined. Also sets the reason why. */
/* NOTE(review): the text of this function was fragmented by extraction
   (statements split across lines, some structural tokens elided).  Per
   review policy only comments are added here; every original token is
   left byte-identical.  */
/* WIP->info carries the FNDECL being scanned; the refusal reason is
   recorded in the file-scope inline_forbidden_reason variable, and
   *HANDLED_OPS_P is set to true when a forbidden construct is found.  */
3512 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3513 struct walk_stmt_info
*wip
)
3515 tree fn
= (tree
) wip
->info
;
3517 gimple
*stmt
= gsi_stmt (*gsi
);
3519 switch (gimple_code (stmt
))
/* Call-statement handling: alloca, setjmp and certain builtins make the
   caller uninlinable.  */
3522 /* Refuse to inline alloca call unless user explicitly forced so as
3523 this may change program's memory overhead drastically when the
3524 function using alloca is called in loop. In GCC present in
3525 SPEC2000 inlining into schedule_block cause it to require 2GB of
3526 RAM instead of 256MB. Don't do so for alloca calls emitted for
3527 VLA objects as those can't cause unbounded growth (they're always
3528 wrapped inside stack_save/stack_restore regions. */
3529 if (gimple_maybe_alloca_call_p (stmt
)
3530 && !gimple_call_alloca_for_var_p (as_a
<gcall
*> (stmt
))
3531 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3533 inline_forbidden_reason
3534 = G_("function %q+F can never be inlined because it uses "
3535 "alloca (override using the always_inline attribute)");
3536 *handled_ops_p
= true;
3540 t
= gimple_call_fndecl (stmt
);
3544 /* We cannot inline functions that call setjmp. */
3545 if (setjmp_call_p (t
))
3547 inline_forbidden_reason
3548 = G_("function %q+F can never be inlined because it uses setjmp");
3549 *handled_ops_p
= true;
/* Normal builtins that block inlining: varargs machinery, SJLJ EH,
   non-local goto, and __builtin_return/__builtin_apply_args.  */
3553 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3554 switch (DECL_FUNCTION_CODE (t
))
3556 /* We cannot inline functions that take a variable number of
3558 case BUILT_IN_VA_START
:
3559 case BUILT_IN_NEXT_ARG
:
3560 case BUILT_IN_VA_END
:
3561 inline_forbidden_reason
3562 = G_("function %q+F can never be inlined because it "
3563 "uses variable argument lists");
3564 *handled_ops_p
= true;
3567 case BUILT_IN_LONGJMP
:
3568 /* We can't inline functions that call __builtin_longjmp at
3569 all. The non-local goto machinery really requires the
3570 destination be in a different function. If we allow the
3571 function calling __builtin_longjmp to be inlined into the
3572 function calling __builtin_setjmp, Things will Go Awry. */
3573 inline_forbidden_reason
3574 = G_("function %q+F can never be inlined because "
3575 "it uses setjmp-longjmp exception handling");
3576 *handled_ops_p
= true;
3579 case BUILT_IN_NONLOCAL_GOTO
:
3581 inline_forbidden_reason
3582 = G_("function %q+F can never be inlined because "
3583 "it uses non-local goto");
3584 *handled_ops_p
= true;
3587 case BUILT_IN_RETURN
:
3588 case BUILT_IN_APPLY_ARGS
:
3589 /* If a __builtin_apply_args caller would be inlined,
3590 it would be saving arguments of the function it has
3591 been inlined into. Similarly __builtin_return would
3592 return from the function the inline has been inlined into. */
3593 inline_forbidden_reason
3594 = G_("function %q+F can never be inlined because "
3595 "it uses __builtin_return or __builtin_apply_args");
3596 *handled_ops_p
= true;
/* Goto-statement handling: a computed goto blocks inlining.  */
3605 t
= gimple_goto_dest (stmt
);
3607 /* We will not inline a function which uses computed goto. The
3608 addresses of its local labels, which may be tucked into
3609 global storage, are of course not constant across
3610 instantiations, which causes unexpected behavior. */
3611 if (TREE_CODE (t
) != LABEL_DECL
)
3613 inline_forbidden_reason
3614 = G_("function %q+F can never be inlined "
3615 "because it contains a computed goto");
3616 *handled_ops_p
= true;
/* Default: nothing forbidden found in this statement.  */
3625 *handled_ops_p
= false;
3629 /* Return true if FNDECL is a function that cannot be inlined into
3633 inline_forbidden_p (tree fndecl
)
3635 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3636 struct walk_stmt_info wi
;
3638 bool forbidden_p
= false;
3640 /* First check for shared reasons not to copy the code. */
3641 inline_forbidden_reason
= copy_forbidden (fun
);
3642 if (inline_forbidden_reason
!= NULL
)
3645 /* Next, walk the statements of the function looking for
3646 constraucts we can't handle, or are non-optimal for inlining. */
3647 hash_set
<tree
> visited_nodes
;
3648 memset (&wi
, 0, sizeof (wi
));
3649 wi
.info
= (void *) fndecl
;
3650 wi
.pset
= &visited_nodes
;
3652 FOR_EACH_BB_FN (bb
, fun
)
3655 gimple_seq seq
= bb_seq (bb
);
3656 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3657 forbidden_p
= (ret
!= NULL
);
3665 /* Return false if the function FNDECL cannot be inlined on account of its
3666 attributes, true otherwise. */
3668 function_attribute_inlinable_p (const_tree fndecl
)
3670 if (targetm
.attribute_table
)
3674 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3676 const_tree name
= TREE_PURPOSE (a
);
3679 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3680 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3681 return targetm
.function_attribute_inlinable_p (fndecl
);
3688 /* Returns nonzero if FN is a function that does not have any
3689 fundamental inline blocking properties. */
3692 tree_inlinable_function_p (tree fn
)
3694 bool inlinable
= true;
3698 /* If we've already decided this function shouldn't be inlined,
3699 there's no need to check again. */
3700 if (DECL_UNINLINABLE (fn
))
3703 /* We only warn for functions declared `inline' by the user. */
3704 do_warning
= (warn_inline
3705 && DECL_DECLARED_INLINE_P (fn
)
3706 && !DECL_NO_INLINE_WARNING_P (fn
)
3707 && !DECL_IN_SYSTEM_HEADER (fn
));
3709 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3712 && always_inline
== NULL
)
3715 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3716 "is suppressed using -fno-inline", fn
);
3720 else if (!function_attribute_inlinable_p (fn
))
3723 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3724 "uses attributes conflicting with inlining", fn
);
3728 else if (inline_forbidden_p (fn
))
3730 /* See if we should warn about uninlinable functions. Previously,
3731 some of these warnings would be issued while trying to expand
3732 the function inline, but that would cause multiple warnings
3733 about functions that would for example call alloca. But since
3734 this a property of the function, just one warning is enough.
3735 As a bonus we can now give more details about the reason why a
3736 function is not inlinable. */
3738 error (inline_forbidden_reason
, fn
);
3739 else if (do_warning
)
3740 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3745 /* Squirrel away the result so that we don't have to check again. */
3746 DECL_UNINLINABLE (fn
) = !inlinable
;
3751 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
3752 word size and take possible memcpy call into account and return
3753 cost based on whether optimizing for size or speed according to SPEED_P. */
3756 estimate_move_cost (tree type
, bool ARG_UNUSED (speed_p
))
3760 gcc_assert (!VOID_TYPE_P (type
));
3762 if (TREE_CODE (type
) == VECTOR_TYPE
)
3764 scalar_mode inner
= SCALAR_TYPE_MODE (TREE_TYPE (type
));
3765 machine_mode simd
= targetm
.vectorize
.preferred_simd_mode (inner
);
3767 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type
)));
3768 int simd_mode_size
= estimated_poly_value (GET_MODE_SIZE (simd
));
3769 return ((orig_mode_size
+ simd_mode_size
- 1)
3773 size
= int_size_in_bytes (type
);
3775 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (speed_p
))
3776 /* Cost of a memcpy call, 3 arguments and the call. */
3779 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
3782 /* Returns cost of operation CODE, according to WEIGHTS */
/* NOTE(review): the text of this function was fragmented by extraction;
   several case labels and return statements are elided.  Only comments
   are added here; every original token is left byte-identical.  */
/* OP1 and OP2 are the statement operands; OP2 is consulted only for
   the division/modulo cases below.  */
3785 estimate_operator_cost (enum tree_code code
, eni_weights
*weights
,
3786 tree op1 ATTRIBUTE_UNUSED
, tree op2
)
3790 /* These are "free" conversions, or their presumed cost
3791 is folded into other operations. */
3796 case VIEW_CONVERT_EXPR
:
3799 /* Assign cost of 1 to usual operations.
3800 ??? We may consider mapping RTL costs to this. */
3806 case POINTER_PLUS_EXPR
:
3807 case POINTER_DIFF_EXPR
:
3810 case MULT_HIGHPART_EXPR
:
3812 case ADDR_SPACE_CONVERT_EXPR
:
3813 case FIXED_CONVERT_EXPR
:
3814 case FIX_TRUNC_EXPR
:
3833 case TRUTH_ANDIF_EXPR
:
3834 case TRUTH_ORIF_EXPR
:
3835 case TRUTH_AND_EXPR
:
3837 case TRUTH_XOR_EXPR
:
3838 case TRUTH_NOT_EXPR
:
3847 case UNORDERED_EXPR
:
3858 case PREDECREMENT_EXPR
:
3859 case PREINCREMENT_EXPR
:
3860 case POSTDECREMENT_EXPR
:
3861 case POSTINCREMENT_EXPR
:
3863 case REALIGN_LOAD_EXPR
:
/* Widening / narrowing vector operations also get the default cost.  */
3865 case WIDEN_SUM_EXPR
:
3866 case WIDEN_MULT_EXPR
:
3869 case WIDEN_MULT_PLUS_EXPR
:
3870 case WIDEN_MULT_MINUS_EXPR
:
3871 case WIDEN_LSHIFT_EXPR
:
3873 case VEC_WIDEN_MULT_HI_EXPR
:
3874 case VEC_WIDEN_MULT_LO_EXPR
:
3875 case VEC_WIDEN_MULT_EVEN_EXPR
:
3876 case VEC_WIDEN_MULT_ODD_EXPR
:
3877 case VEC_UNPACK_HI_EXPR
:
3878 case VEC_UNPACK_LO_EXPR
:
3879 case VEC_UNPACK_FLOAT_HI_EXPR
:
3880 case VEC_UNPACK_FLOAT_LO_EXPR
:
3881 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
3882 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
3883 case VEC_PACK_TRUNC_EXPR
:
3884 case VEC_PACK_SAT_EXPR
:
3885 case VEC_PACK_FIX_TRUNC_EXPR
:
3886 case VEC_PACK_FLOAT_EXPR
:
3887 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3888 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3889 case VEC_DUPLICATE_EXPR
:
3890 case VEC_SERIES_EXPR
:
3894 /* Few special cases of expensive operations. This is useful
3895 to avoid inlining on functions having too many of these. */
3896 case TRUNC_DIV_EXPR
:
3898 case FLOOR_DIV_EXPR
:
3899 case ROUND_DIV_EXPR
:
3900 case EXACT_DIV_EXPR
:
3901 case TRUNC_MOD_EXPR
:
3903 case FLOOR_MOD_EXPR
:
3904 case ROUND_MOD_EXPR
:
/* Division/modulo by a non-constant divisor is charged the (higher)
   div_mod_cost weight.  */
3906 if (TREE_CODE (op2
) != INTEGER_CST
)
3907 return weights
->div_mod_cost
;
3910 /* Bit-field insertion needs several shift and mask operations. */
3911 case BIT_INSERT_EXPR
:
3915 /* We expect a copy assignment with no operator. */
3916 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
);
3922 /* Estimate number of instructions that will be created by expanding
3923 the statements in the statement sequence STMTS.
3924 WEIGHTS contains weights attributed to various constructs. */
3927 estimate_num_insns_seq (gimple_seq stmts
, eni_weights
*weights
)
3930 gimple_stmt_iterator gsi
;
3933 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3934 cost
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
3940 /* Estimate number of instructions that will be created by expanding STMT.
3941 WEIGHTS contains weights attributed to various constructs. */
/* NOTE(review): the text of this function was fragmented by extraction;
   case labels, declarations and some returns are elided.  Only comments
   are added here; every original token is left byte-identical.  */
3944 estimate_num_insns (gimple
*stmt
, eni_weights
*weights
)
3947 enum gimple_code code
= gimple_code (stmt
);
/* GIMPLE_ASSIGN handling.  */
3954 /* Try to estimate the cost of assignments. We have three cases to
3956 1) Simple assignments to registers;
3957 2) Stores to things that must live in memory. This includes
3958 "normal" stores to scalars, but also assignments of large
3959 structures, or constructors of big arrays;
3961 Let us look at the first two cases, assuming we have "a = b + C":
3962 <GIMPLE_ASSIGN <var_decl "a">
3963 <plus_expr <var_decl "b"> <constant C>>
3964 If "a" is a GIMPLE register, the assignment to it is free on almost
3965 any target, because "a" usually ends up in a real register. Hence
3966 the only cost of this expression comes from the PLUS_EXPR, and we
3967 can ignore the GIMPLE_ASSIGN.
3968 If "a" is not a GIMPLE register, the assignment to "a" will most
3969 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3970 of moving something into "a", which we compute using the function
3971 estimate_move_cost. */
3972 if (gimple_clobber_p (stmt
))
3973 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3975 lhs
= gimple_assign_lhs (stmt
);
3976 rhs
= gimple_assign_rhs1 (stmt
);
3980 /* Account for the cost of moving to / from memory. */
3981 if (gimple_store_p (stmt
))
3982 cost
+= estimate_move_cost (TREE_TYPE (lhs
), weights
->time_based
);
3983 if (gimple_assign_load_p (stmt
))
3984 cost
+= estimate_move_cost (TREE_TYPE (rhs
), weights
->time_based
);
3986 cost
+= estimate_operator_cost (gimple_assign_rhs_code (stmt
), weights
,
3987 gimple_assign_rhs1 (stmt
),
3988 get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
3989 == GIMPLE_BINARY_RHS
3990 ? gimple_assign_rhs2 (stmt
) : NULL
);
/* GIMPLE_COND: one jump plus the cost of the comparison.  */
3994 cost
= 1 + estimate_operator_cost (gimple_cond_code (stmt
), weights
,
3995 gimple_op (stmt
, 0),
3996 gimple_op (stmt
, 1));
/* GIMPLE_SWITCH handling.  */
4001 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
4002 /* Take into account cost of the switch + guess 2 conditional jumps for
4005 TODO: once the switch expansion logic is sufficiently separated, we can
4006 do better job on estimating cost of the switch. */
4007 if (weights
->time_based
)
4008 cost
= floor_log2 (gimple_switch_num_labels (switch_stmt
)) * 2;
4010 cost
= gimple_switch_num_labels (switch_stmt
) * 2;
/* GIMPLE_CALL handling: builtins may be cheap or free, other calls are
   charged call_cost/indirect_call_cost plus argument/return moves.  */
4018 if (gimple_call_internal_p (stmt
))
4020 else if ((decl
= gimple_call_fndecl (stmt
))
4021 && DECL_BUILT_IN (decl
))
4023 /* Do not special case builtins where we see the body.
4024 This just confuse inliner. */
4025 struct cgraph_node
*node
;
4026 if (!(node
= cgraph_node::get (decl
))
4027 || node
->definition
)
4029 /* For buitins that are likely expanded to nothing or
4030 inlined do not account operand costs. */
4031 else if (is_simple_builtin (decl
))
4033 else if (is_inexpensive_builtin (decl
))
4034 return weights
->target_builtin_call_cost
;
4035 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4037 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4038 specialize the cheap expansion we do here.
4039 ??? This asks for a more general solution. */
4040 switch (DECL_FUNCTION_CODE (decl
))
4045 if (TREE_CODE (gimple_call_arg (stmt
, 1)) == REAL_CST
4047 (&TREE_REAL_CST (gimple_call_arg (stmt
, 1)),
4049 return estimate_operator_cost
4050 (MULT_EXPR
, weights
, gimple_call_arg (stmt
, 0),
4051 gimple_call_arg (stmt
, 0));
4060 cost
= decl
? weights
->call_cost
: weights
->indirect_call_cost
;
4061 if (gimple_call_lhs (stmt
))
4062 cost
+= estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt
)),
4063 weights
->time_based
);
4064 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4066 tree arg
= gimple_call_arg (stmt
, i
);
4067 cost
+= estimate_move_cost (TREE_TYPE (arg
),
4068 weights
->time_based
);
/* GIMPLE_RETURN.  */
4074 return weights
->return_cost
;
4080 case GIMPLE_PREDICT
:
/* GIMPLE_ASM: scale with the number of asm instructions, capped.  */
4086 int count
= asm_str_count (gimple_asm_string (as_a
<gasm
*> (stmt
)));
4087 /* 1000 means infinity. This avoids overflows later
4088 with very long asm statements. */
4091 return MAX (1, count
);
4095 /* This is either going to be an external function call with one
4096 argument, or two register copy statements plus a goto. */
4099 case GIMPLE_EH_DISPATCH
:
4100 /* ??? This is going to turn into a switch statement. Ideally
4101 we'd have a look at the eh region and estimate the number of
/* GIMPLE_BIND: recurse into the bound body.  */
4106 return estimate_num_insns_seq (
4107 gimple_bind_body (as_a
<gbind
*> (stmt
)),
4110 case GIMPLE_EH_FILTER
:
4111 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt
), weights
);
/* GIMPLE_CATCH: recurse into the handler sequence.  */
4114 return estimate_num_insns_seq (gimple_catch_handler (
4115 as_a
<gcatch
*> (stmt
)),
/* GIMPLE_TRY: sum the eval and cleanup sequences.  */
4119 return (estimate_num_insns_seq (gimple_try_eval (stmt
), weights
)
4120 + estimate_num_insns_seq (gimple_try_cleanup (stmt
), weights
));
4122 /* OMP directives are generally very expensive. */
4124 case GIMPLE_OMP_RETURN
:
4125 case GIMPLE_OMP_SECTIONS_SWITCH
:
4126 case GIMPLE_OMP_ATOMIC_STORE
:
4127 case GIMPLE_OMP_CONTINUE
:
4128 /* ...except these, which are cheap. */
4131 case GIMPLE_OMP_ATOMIC_LOAD
:
4132 return weights
->omp_cost
;
4134 case GIMPLE_OMP_FOR
:
4135 return (weights
->omp_cost
4136 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
)
4137 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt
), weights
));
4139 case GIMPLE_OMP_PARALLEL
:
4140 case GIMPLE_OMP_TASK
:
4141 case GIMPLE_OMP_CRITICAL
:
4142 case GIMPLE_OMP_MASTER
:
4143 case GIMPLE_OMP_TASKGROUP
:
4144 case GIMPLE_OMP_ORDERED
:
4145 case GIMPLE_OMP_SECTION
:
4146 case GIMPLE_OMP_SECTIONS
:
4147 case GIMPLE_OMP_SINGLE
:
4148 case GIMPLE_OMP_TARGET
:
4149 case GIMPLE_OMP_TEAMS
:
4150 return (weights
->omp_cost
4151 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
));
4153 case GIMPLE_TRANSACTION
:
4154 return (weights
->tm_cost
4155 + estimate_num_insns_seq (gimple_transaction_body (
4156 as_a
<gtransaction
*> (stmt
)),
4166 /* Estimate number of instructions that will be created by expanding
4167 function FNDECL. WEIGHTS contains weights attributed to various
4171 estimate_num_insns_fn (tree fndecl
, eni_weights
*weights
)
4173 struct function
*my_function
= DECL_STRUCT_FUNCTION (fndecl
);
4174 gimple_stmt_iterator bsi
;
4178 gcc_assert (my_function
&& my_function
->cfg
);
4179 FOR_EACH_BB_FN (bb
, my_function
)
4181 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
4182 n
+= estimate_num_insns (gsi_stmt (bsi
), weights
);
4189 /* Initializes weights used by estimate_num_insns. */
4192 init_inline_once (void)
4194 eni_size_weights
.call_cost
= 1;
4195 eni_size_weights
.indirect_call_cost
= 3;
4196 eni_size_weights
.target_builtin_call_cost
= 1;
4197 eni_size_weights
.div_mod_cost
= 1;
4198 eni_size_weights
.omp_cost
= 40;
4199 eni_size_weights
.tm_cost
= 10;
4200 eni_size_weights
.time_based
= false;
4201 eni_size_weights
.return_cost
= 1;
4203 /* Estimating time for call is difficult, since we have no idea what the
4204 called function does. In the current uses of eni_time_weights,
4205 underestimating the cost does less harm than overestimating it, so
4206 we choose a rather small value here. */
4207 eni_time_weights
.call_cost
= 10;
4208 eni_time_weights
.indirect_call_cost
= 15;
4209 eni_time_weights
.target_builtin_call_cost
= 1;
4210 eni_time_weights
.div_mod_cost
= 10;
4211 eni_time_weights
.omp_cost
= 40;
4212 eni_time_weights
.tm_cost
= 40;
4213 eni_time_weights
.time_based
= true;
4214 eni_time_weights
.return_cost
= 2;
4218 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4221 prepend_lexical_block (tree current_block
, tree new_block
)
4223 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (current_block
);
4224 BLOCK_SUBBLOCKS (current_block
) = new_block
;
4225 BLOCK_SUPERCONTEXT (new_block
) = current_block
;
4228 /* Add local variables from CALLEE to CALLER. */
4231 add_local_variables (struct function
*callee
, struct function
*caller
,
4237 FOR_EACH_LOCAL_DECL (callee
, ix
, var
)
4238 if (!can_be_nonlocal (var
, id
))
4240 tree new_var
= remap_decl (var
, id
);
4242 /* Remap debug-expressions. */
4244 && DECL_HAS_DEBUG_EXPR_P (var
)
4247 tree tem
= DECL_DEBUG_EXPR (var
);
4248 bool old_regimplify
= id
->regimplify
;
4249 id
->remapping_type_depth
++;
4250 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
4251 id
->remapping_type_depth
--;
4252 id
->regimplify
= old_regimplify
;
4253 SET_DECL_DEBUG_EXPR (new_var
, tem
);
4254 DECL_HAS_DEBUG_EXPR_P (new_var
) = 1;
4256 add_local_decl (caller
, new_var
);
4260 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4261 have brought in or introduced any debug stmts for SRCVAR. */
4264 reset_debug_binding (copy_body_data
*id
, tree srcvar
, gimple_seq
*bindings
)
4266 tree
*remappedvarp
= id
->decl_map
->get (srcvar
);
4271 if (!VAR_P (*remappedvarp
))
4274 if (*remappedvarp
== id
->retvar
)
4277 tree tvar
= target_for_debug_bind (*remappedvarp
);
4281 gdebug
*stmt
= gimple_build_debug_bind (tvar
, NULL_TREE
,
4283 gimple_seq_add_stmt (bindings
, stmt
);
4286 /* For each inlined variable for which we may have debug bind stmts,
4287 add before GSI a final debug stmt resetting it, marking the end of
4288 its life, so that var-tracking knows it doesn't have to compute
4289 further locations for it. */
4292 reset_debug_bindings (copy_body_data
*id
, gimple_stmt_iterator gsi
)
4296 gimple_seq bindings
= NULL
;
4298 if (!gimple_in_ssa_p (id
->src_cfun
))
4301 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
4304 for (var
= DECL_ARGUMENTS (id
->src_fn
);
4305 var
; var
= DECL_CHAIN (var
))
4306 reset_debug_binding (id
, var
, &bindings
);
4308 FOR_EACH_LOCAL_DECL (id
->src_cfun
, ix
, var
)
4309 reset_debug_binding (id
, var
, &bindings
);
4311 gsi_insert_seq_before_without_update (&gsi
, bindings
, GSI_SAME_STMT
);
4314 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4317 expand_call_inline (basic_block bb
, gimple
*stmt
, copy_body_data
*id
)
4321 hash_map
<tree
, tree
> *dst
;
4322 hash_map
<tree
, tree
> *st
= NULL
;
4325 struct cgraph_edge
*cg_edge
;
4326 cgraph_inline_failed_t reason
;
4327 basic_block return_block
;
4329 gimple_stmt_iterator gsi
, stmt_gsi
;
4330 bool successfully_inlined
= false;
4331 bool purge_dead_abnormal_edges
;
4333 unsigned int prop_mask
, src_properties
;
4334 struct function
*dst_cfun
;
4337 gimple
*simtenter_stmt
= NULL
;
4338 vec
<tree
> *simtvars_save
;
4340 /* The gimplifier uses input_location in too many places, such as
4341 internal_get_tmp_var (). */
4342 location_t saved_location
= input_location
;
4343 input_location
= gimple_location (stmt
);
4345 /* From here on, we're only interested in CALL_EXPRs. */
4346 call_stmt
= dyn_cast
<gcall
*> (stmt
);
4350 cg_edge
= id
->dst_node
->get_edge (stmt
);
4351 gcc_checking_assert (cg_edge
);
4352 /* First, see if we can figure out what function is being called.
4353 If we cannot, then there is no hope of inlining the function. */
4354 if (cg_edge
->indirect_unknown_callee
)
4356 fn
= cg_edge
->callee
->decl
;
4357 gcc_checking_assert (fn
);
4359 /* If FN is a declaration of a function in a nested scope that was
4360 globally declared inline, we don't set its DECL_INITIAL.
4361 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4362 C++ front-end uses it for cdtors to refer to their internal
4363 declarations, that are not real functions. Fortunately those
4364 don't have trees to be saved, so we can tell by checking their
4366 if (!DECL_INITIAL (fn
)
4367 && DECL_ABSTRACT_ORIGIN (fn
)
4368 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn
)))
4369 fn
= DECL_ABSTRACT_ORIGIN (fn
);
4371 /* Don't try to inline functions that are not well-suited to inlining. */
4372 if (cg_edge
->inline_failed
)
4374 reason
= cg_edge
->inline_failed
;
4375 /* If this call was originally indirect, we do not want to emit any
4376 inlining related warnings or sorry messages because there are no
4377 guarantees regarding those. */
4378 if (cg_edge
->indirect_inlining_edge
)
4381 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
))
4382 /* For extern inline functions that get redefined we always
4383 silently ignored always_inline flag. Better behavior would
4384 be to be able to keep both bodies and use extern inline body
4385 for inlining, but we can't do that because frontends overwrite
4387 && !cg_edge
->callee
->local
.redefined_extern_inline
4388 /* During early inline pass, report only when optimization is
4390 && (symtab
->global_info_ready
4392 || cgraph_inline_failed_type (reason
) == CIF_FINAL_ERROR
)
4393 /* PR 20090218-1_0.c. Body can be provided by another module. */
4394 && (reason
!= CIF_BODY_NOT_AVAILABLE
|| !flag_generate_lto
))
4396 error ("inlining failed in call to always_inline %q+F: %s", fn
,
4397 cgraph_inline_failed_string (reason
));
4398 if (gimple_location (stmt
) != UNKNOWN_LOCATION
)
4399 inform (gimple_location (stmt
), "called from here");
4400 else if (DECL_SOURCE_LOCATION (cfun
->decl
) != UNKNOWN_LOCATION
)
4401 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
4402 "called from this function");
4404 else if (warn_inline
4405 && DECL_DECLARED_INLINE_P (fn
)
4406 && !DECL_NO_INLINE_WARNING_P (fn
)
4407 && !DECL_IN_SYSTEM_HEADER (fn
)
4408 && reason
!= CIF_UNSPECIFIED
4409 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn
))
4410 /* Do not warn about not inlined recursive calls. */
4411 && !cg_edge
->recursive_p ()
4412 /* Avoid warnings during early inline pass. */
4413 && symtab
->global_info_ready
)
4415 if (warning (OPT_Winline
, "inlining failed in call to %q+F: %s",
4416 fn
, _(cgraph_inline_failed_string (reason
))))
4418 if (gimple_location (stmt
) != UNKNOWN_LOCATION
)
4419 inform (gimple_location (stmt
), "called from here");
4420 else if (DECL_SOURCE_LOCATION (cfun
->decl
) != UNKNOWN_LOCATION
)
4421 inform (DECL_SOURCE_LOCATION (cfun
->decl
),
4422 "called from this function");
4427 id
->src_node
= cg_edge
->callee
;
4429 /* If callee is thunk, all we need is to adjust the THIS pointer
4430 and redirect to function being thunked. */
4431 if (id
->src_node
->thunk
.thunk_p
)
4434 tree virtual_offset
= NULL
;
4435 profile_count count
= cg_edge
->count
;
4437 gimple_stmt_iterator iter
= gsi_for_stmt (stmt
);
4440 edge
= id
->src_node
->callees
->clone (id
->dst_node
, call_stmt
,
4442 profile_count::one (),
4443 profile_count::one (),
4445 edge
->count
= count
;
4446 if (id
->src_node
->thunk
.virtual_offset_p
)
4447 virtual_offset
= size_int (id
->src_node
->thunk
.virtual_value
);
4448 op
= create_tmp_reg_fn (cfun
, TREE_TYPE (gimple_call_arg (stmt
, 0)),
4450 gsi_insert_before (&iter
, gimple_build_assign (op
,
4451 gimple_call_arg (stmt
, 0)),
4453 gcc_assert (id
->src_node
->thunk
.this_adjusting
);
4454 op
= thunk_adjust (&iter
, op
, 1, id
->src_node
->thunk
.fixed_offset
,
4457 gimple_call_set_arg (stmt
, 0, op
);
4458 gimple_call_set_fndecl (stmt
, edge
->callee
->decl
);
4460 id
->src_node
->remove ();
4461 expand_call_inline (bb
, stmt
, id
);
4462 maybe_remove_unused_call_args (cfun
, stmt
);
4465 fn
= cg_edge
->callee
->decl
;
4466 cg_edge
->callee
->get_untransformed_body ();
4468 if (flag_checking
&& cg_edge
->callee
->decl
!= id
->dst_node
->decl
)
4469 cg_edge
->callee
->verify ();
4471 /* We will be inlining this callee. */
4472 id
->eh_lp_nr
= lookup_stmt_eh_lp (stmt
);
4473 id
->assign_stmts
.create (0);
4475 /* Update the callers EH personality. */
4476 if (DECL_FUNCTION_PERSONALITY (fn
))
4477 DECL_FUNCTION_PERSONALITY (cg_edge
->caller
->decl
)
4478 = DECL_FUNCTION_PERSONALITY (fn
);
4480 /* Split the block before the GIMPLE_CALL. */
4481 stmt_gsi
= gsi_for_stmt (stmt
);
4482 gsi_prev (&stmt_gsi
);
4483 e
= split_block (bb
, gsi_end_p (stmt_gsi
) ? NULL
: gsi_stmt (stmt_gsi
));
4485 return_block
= e
->dest
;
4488 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4489 been the source of abnormal edges. In this case, schedule
4490 the removal of dead abnormal edges. */
4491 gsi
= gsi_start_bb (return_block
);
4493 purge_dead_abnormal_edges
= gsi_end_p (gsi
);
4495 stmt_gsi
= gsi_start_bb (return_block
);
4497 /* Build a block containing code to initialize the arguments, the
4498 actual inline expansion of the body, and a label for the return
4499 statements within the function to jump to. The type of the
4500 statement expression is the return type of the function call.
4501 ??? If the call does not have an associated block then we will
4502 remap all callee blocks to NULL, effectively dropping most of
4503 its debug information. This should only happen for calls to
4504 artificial decls inserted by the compiler itself. We need to
4505 either link the inlined blocks into the caller block tree or
4506 not refer to them in any way to not break GC for locations. */
4507 if (gimple_block (stmt
))
4509 id
->block
= make_node (BLOCK
);
4510 BLOCK_ABSTRACT_ORIGIN (id
->block
) = fn
;
4511 BLOCK_SOURCE_LOCATION (id
->block
)
4512 = LOCATION_LOCUS (gimple_location (stmt
));
4513 prepend_lexical_block (gimple_block (stmt
), id
->block
);
4516 /* Local declarations will be replaced by their equivalents in this map. */
4518 id
->decl_map
= new hash_map
<tree
, tree
>;
4519 dst
= id
->debug_map
;
4520 id
->debug_map
= NULL
;
4522 /* Record the function we are about to inline. */
4524 id
->src_cfun
= DECL_STRUCT_FUNCTION (fn
);
4525 id
->reset_location
= DECL_IGNORED_P (fn
);
4526 id
->call_stmt
= call_stmt
;
4528 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4529 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4530 dst_cfun
= DECL_STRUCT_FUNCTION (id
->dst_fn
);
4531 simtvars_save
= id
->dst_simt_vars
;
4532 if (!(dst_cfun
->curr_properties
& PROP_gimple_lomp_dev
)
4533 && (simduid
= bb
->loop_father
->simduid
) != NULL_TREE
4534 && (simduid
= ssa_default_def (dst_cfun
, simduid
)) != NULL_TREE
4535 && single_imm_use (simduid
, &use
, &simtenter_stmt
)
4536 && is_gimple_call (simtenter_stmt
)
4537 && gimple_call_internal_p (simtenter_stmt
, IFN_GOMP_SIMT_ENTER
))
4538 vec_alloc (id
->dst_simt_vars
, 0);
4540 id
->dst_simt_vars
= NULL
;
4542 if (profile_status_for_fn (id
->src_cfun
) == PROFILE_ABSENT
)
4543 profile_status_for_fn (dst_cfun
) = PROFILE_ABSENT
;
4545 /* If the src function contains an IFN_VA_ARG, then so will the dst
4546 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4547 prop_mask
= PROP_gimple_lva
| PROP_gimple_lomp_dev
;
4548 src_properties
= id
->src_cfun
->curr_properties
& prop_mask
;
4549 if (src_properties
!= prop_mask
)
4550 dst_cfun
->curr_properties
&= src_properties
| ~prop_mask
;
4552 gcc_assert (!id
->src_cfun
->after_inlining
);
4555 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn
)))
4557 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4558 gsi_insert_after (&si
, gimple_build_predict (PRED_COLD_FUNCTION
,
4562 initialize_inlined_parameters (id
, stmt
, fn
, bb
);
4563 if (debug_nonbind_markers_p
&& debug_inline_points
&& id
->block
4564 && inlined_function_outer_scope_p (id
->block
))
4566 gimple_stmt_iterator si
= gsi_last_bb (bb
);
4567 gsi_insert_after (&si
, gimple_build_debug_inline_entry
4568 (id
->block
, input_location
), GSI_NEW_STMT
);
4571 if (DECL_INITIAL (fn
))
4573 if (gimple_block (stmt
))
4577 prepend_lexical_block (id
->block
,
4578 remap_blocks (DECL_INITIAL (fn
), id
));
4579 gcc_checking_assert (BLOCK_SUBBLOCKS (id
->block
)
4580 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id
->block
))
4582 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4583 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4584 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4585 under it. The parameters can be then evaluated in the debugger,
4586 but don't show in backtraces. */
4587 for (var
= &BLOCK_VARS (BLOCK_SUBBLOCKS (id
->block
)); *var
; )
4588 if (TREE_CODE (DECL_ORIGIN (*var
)) == PARM_DECL
)
4591 *var
= TREE_CHAIN (v
);
4592 TREE_CHAIN (v
) = BLOCK_VARS (id
->block
);
4593 BLOCK_VARS (id
->block
) = v
;
4596 var
= &TREE_CHAIN (*var
);
4599 remap_blocks_to_null (DECL_INITIAL (fn
), id
);
4602 /* Return statements in the function body will be replaced by jumps
4603 to the RET_LABEL. */
4604 gcc_assert (DECL_INITIAL (fn
));
4605 gcc_assert (TREE_CODE (DECL_INITIAL (fn
)) == BLOCK
);
4607 /* Find the LHS to which the result of this call is assigned. */
4609 if (gimple_call_lhs (stmt
))
4611 modify_dest
= gimple_call_lhs (stmt
);
4613 /* The function which we are inlining might not return a value,
4614 in which case we should issue a warning that the function
4615 does not return a value. In that case the optimizers will
4616 see that the variable to which the value is assigned was not
4617 initialized. We do not want to issue a warning about that
4618 uninitialized variable. */
4619 if (DECL_P (modify_dest
))
4620 TREE_NO_WARNING (modify_dest
) = 1;
4622 if (gimple_call_return_slot_opt_p (call_stmt
))
4624 return_slot
= modify_dest
;
4631 /* If we are inlining a call to the C++ operator new, we don't want
4632 to use type based alias analysis on the return value. Otherwise
4633 we may get confused if the compiler sees that the inlined new
4634 function returns a pointer which was just deleted. See bug
4636 if (DECL_IS_OPERATOR_NEW (fn
))
4642 /* Declare the return variable for the function. */
4643 use_retvar
= declare_return_variable (id
, return_slot
, modify_dest
, bb
);
4645 /* Add local vars in this inlined callee to caller. */
4646 add_local_variables (id
->src_cfun
, cfun
, id
);
4648 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4650 fprintf (dump_file
, "Inlining %s to %s with frequency %4.2f\n",
4651 id
->src_node
->dump_name (),
4652 id
->dst_node
->dump_name (),
4653 cg_edge
->sreal_frequency ().to_double ());
4654 id
->src_node
->dump (dump_file
);
4655 id
->dst_node
->dump (dump_file
);
4658 /* This is it. Duplicate the callee body. Assume callee is
4659 pre-gimplified. Note that we must not alter the caller
4660 function in any way before this point, as this CALL_EXPR may be
4661 a self-referential call; if we're calling ourselves, we need to
4662 duplicate our body before altering anything. */
4663 copy_body (id
, bb
, return_block
, NULL
);
4665 reset_debug_bindings (id
, stmt_gsi
);
4667 if (flag_stack_reuse
!= SR_NONE
)
4668 for (tree p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
4669 if (!TREE_THIS_VOLATILE (p
))
4671 tree
*varp
= id
->decl_map
->get (p
);
4672 if (varp
&& VAR_P (*varp
) && !is_gimple_reg (*varp
))
4674 tree clobber
= build_constructor (TREE_TYPE (*varp
), NULL
);
4675 gimple
*clobber_stmt
;
4676 TREE_THIS_VOLATILE (clobber
) = 1;
4677 clobber_stmt
= gimple_build_assign (*varp
, clobber
);
4678 gimple_set_location (clobber_stmt
, gimple_location (stmt
));
4679 gsi_insert_before (&stmt_gsi
, clobber_stmt
, GSI_SAME_STMT
);
4683 /* Reset the escaped solution. */
4684 if (cfun
->gimple_df
)
4685 pt_solution_reset (&cfun
->gimple_df
->escaped
);
4687 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4688 if (id
->dst_simt_vars
&& id
->dst_simt_vars
->length () > 0)
4690 size_t nargs
= gimple_call_num_args (simtenter_stmt
);
4691 vec
<tree
> *vars
= id
->dst_simt_vars
;
4692 auto_vec
<tree
> newargs (nargs
+ vars
->length ());
4693 for (size_t i
= 0; i
< nargs
; i
++)
4694 newargs
.quick_push (gimple_call_arg (simtenter_stmt
, i
));
4695 for (tree
*pvar
= vars
->begin (); pvar
!= vars
->end (); pvar
++)
4697 tree ptrtype
= build_pointer_type (TREE_TYPE (*pvar
));
4698 newargs
.quick_push (build1 (ADDR_EXPR
, ptrtype
, *pvar
));
4700 gcall
*g
= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER
, newargs
);
4701 gimple_call_set_lhs (g
, gimple_call_lhs (simtenter_stmt
));
4702 gimple_stmt_iterator gsi
= gsi_for_stmt (simtenter_stmt
);
4703 gsi_replace (&gsi
, g
, false);
4705 vec_free (id
->dst_simt_vars
);
4706 id
->dst_simt_vars
= simtvars_save
;
4711 delete id
->debug_map
;
4712 id
->debug_map
= dst
;
4714 delete id
->decl_map
;
4717 /* Unlink the calls virtual operands before replacing it. */
4718 unlink_stmt_vdef (stmt
);
4719 if (gimple_vdef (stmt
)
4720 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
4721 release_ssa_name (gimple_vdef (stmt
));
4723 /* If the inlined function returns a result that we care about,
4724 substitute the GIMPLE_CALL with an assignment of the return
4725 variable to the LHS of the call. That is, if STMT was
4726 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4727 if (use_retvar
&& gimple_call_lhs (stmt
))
4729 gimple
*old_stmt
= stmt
;
4730 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), use_retvar
);
4731 gimple_set_location (stmt
, gimple_location (old_stmt
));
4732 gsi_replace (&stmt_gsi
, stmt
, false);
4733 maybe_clean_or_replace_eh_stmt (old_stmt
, stmt
);
4734 /* Append a clobber for id->retvar if easily possible. */
4735 if (flag_stack_reuse
!= SR_NONE
4737 && VAR_P (id
->retvar
)
4738 && id
->retvar
!= return_slot
4739 && id
->retvar
!= modify_dest
4740 && !TREE_THIS_VOLATILE (id
->retvar
)
4741 && !is_gimple_reg (id
->retvar
)
4742 && !stmt_ends_bb_p (stmt
))
4744 tree clobber
= build_constructor (TREE_TYPE (id
->retvar
), NULL
);
4745 gimple
*clobber_stmt
;
4746 TREE_THIS_VOLATILE (clobber
) = 1;
4747 clobber_stmt
= gimple_build_assign (id
->retvar
, clobber
);
4748 gimple_set_location (clobber_stmt
, gimple_location (old_stmt
));
4749 gsi_insert_after (&stmt_gsi
, clobber_stmt
, GSI_SAME_STMT
);
4754 /* Handle the case of inlining a function with no return
4755 statement, which causes the return value to become undefined. */
4756 if (gimple_call_lhs (stmt
)
4757 && TREE_CODE (gimple_call_lhs (stmt
)) == SSA_NAME
)
4759 tree name
= gimple_call_lhs (stmt
);
4760 tree var
= SSA_NAME_VAR (name
);
4761 tree def
= var
? ssa_default_def (cfun
, var
) : NULL
;
4765 /* If the variable is used undefined, make this name
4766 undefined via a move. */
4767 stmt
= gimple_build_assign (gimple_call_lhs (stmt
), def
);
4768 gsi_replace (&stmt_gsi
, stmt
, true);
4774 var
= create_tmp_reg_fn (cfun
, TREE_TYPE (name
), NULL
);
4775 SET_SSA_NAME_VAR_OR_IDENTIFIER (name
, var
);
4777 /* Otherwise make this variable undefined. */
4778 gsi_remove (&stmt_gsi
, true);
4779 set_ssa_default_def (cfun
, var
, name
);
4780 SSA_NAME_DEF_STMT (name
) = gimple_build_nop ();
4783 /* Replace with a clobber for id->retvar. */
4784 else if (flag_stack_reuse
!= SR_NONE
4786 && VAR_P (id
->retvar
)
4787 && id
->retvar
!= return_slot
4788 && id
->retvar
!= modify_dest
4789 && !TREE_THIS_VOLATILE (id
->retvar
)
4790 && !is_gimple_reg (id
->retvar
))
4792 tree clobber
= build_constructor (TREE_TYPE (id
->retvar
), NULL
);
4793 gimple
*clobber_stmt
;
4794 TREE_THIS_VOLATILE (clobber
) = 1;
4795 clobber_stmt
= gimple_build_assign (id
->retvar
, clobber
);
4796 gimple_set_location (clobber_stmt
, gimple_location (stmt
));
4797 gsi_replace (&stmt_gsi
, clobber_stmt
, false);
4798 maybe_clean_or_replace_eh_stmt (stmt
, clobber_stmt
);
4801 gsi_remove (&stmt_gsi
, true);
4804 if (purge_dead_abnormal_edges
)
4806 gimple_purge_dead_eh_edges (return_block
);
4807 gimple_purge_dead_abnormal_call_edges (return_block
);
4810 /* If the value of the new expression is ignored, that's OK. We
4811 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4812 the equivalent inlined version either. */
4813 if (is_gimple_assign (stmt
))
4815 gcc_assert (gimple_assign_single_p (stmt
)
4816 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)));
4817 TREE_USED (gimple_assign_rhs1 (stmt
)) = 1;
4820 id
->assign_stmts
.release ();
4822 /* Output the inlining info for this abstract function, since it has been
4823 inlined. If we don't do this now, we can lose the information about the
4824 variables in the function when the blocks get blown away as soon as we
4825 remove the cgraph node. */
4826 if (gimple_block (stmt
))
4827 (*debug_hooks
->outlining_inline_function
) (fn
);
4829 /* Update callgraph if needed. */
4830 cg_edge
->callee
->remove ();
4832 id
->block
= NULL_TREE
;
4833 id
->retvar
= NULL_TREE
;
4834 successfully_inlined
= true;
4837 input_location
= saved_location
;
4838 return successfully_inlined
;
4841 /* Expand call statements reachable from STMT_P.
4842 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4843 in a MODIFY_EXPR. */
4846 gimple_expand_calls_inline (basic_block bb
, copy_body_data
*id
)
4848 gimple_stmt_iterator gsi
;
4849 bool inlined
= false;
4851 for (gsi
= gsi_last_bb (bb
); !gsi_end_p (gsi
);)
4853 gimple
*stmt
= gsi_stmt (gsi
);
4856 if (is_gimple_call (stmt
)
4857 && !gimple_call_internal_p (stmt
))
4858 inlined
|= expand_call_inline (bb
, stmt
, id
);
4865 /* Walk all basic blocks created after FIRST and try to fold every statement
4866 in the STATEMENTS pointer set. */
4869 fold_marked_statements (int first
, hash_set
<gimple
*> *statements
)
4871 for (; first
< n_basic_blocks_for_fn (cfun
); first
++)
4872 if (BASIC_BLOCK_FOR_FN (cfun
, first
))
4874 gimple_stmt_iterator gsi
;
4876 for (gsi
= gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, first
));
4879 if (statements
->contains (gsi_stmt (gsi
)))
4881 gimple
*old_stmt
= gsi_stmt (gsi
);
4882 tree old_decl
= is_gimple_call (old_stmt
) ? gimple_call_fndecl (old_stmt
) : 0;
4884 if (old_decl
&& DECL_BUILT_IN (old_decl
))
4886 /* Folding builtins can create multiple instructions,
4887 we need to look at all of them. */
4888 gimple_stmt_iterator i2
= gsi
;
4890 if (fold_stmt (&gsi
))
4893 /* If a builtin at the end of a bb folded into nothing,
4894 the following loop won't work. */
4895 if (gsi_end_p (gsi
))
4897 cgraph_update_edges_for_call_stmt (old_stmt
,
4902 i2
= gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun
, first
));
4907 new_stmt
= gsi_stmt (i2
);
4908 update_stmt (new_stmt
);
4909 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4912 if (new_stmt
== gsi_stmt (gsi
))
4914 /* It is okay to check only for the very last
4915 of these statements. If it is a throwing
4916 statement nothing will change. If it isn't
4917 this can remove EH edges. If that weren't
4918 correct then because some intermediate stmts
4919 throw, but not the last one. That would mean
4920 we'd have to split the block, which we can't
4921 here and we'd loose anyway. And as builtins
4922 probably never throw, this all
4924 if (maybe_clean_or_replace_eh_stmt (old_stmt
,
4926 gimple_purge_dead_eh_edges (
4927 BASIC_BLOCK_FOR_FN (cfun
, first
));
4934 else if (fold_stmt (&gsi
))
4936 /* Re-read the statement from GSI as fold_stmt() may
4938 gimple
*new_stmt
= gsi_stmt (gsi
);
4939 update_stmt (new_stmt
);
4941 if (is_gimple_call (old_stmt
)
4942 || is_gimple_call (new_stmt
))
4943 cgraph_update_edges_for_call_stmt (old_stmt
, old_decl
,
4946 if (maybe_clean_or_replace_eh_stmt (old_stmt
, new_stmt
))
4947 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun
,
4954 /* Expand calls to inline functions in the body of FN. */
4957 optimize_inline_calls (tree fn
)
4961 int last
= n_basic_blocks_for_fn (cfun
);
4962 bool inlined_p
= false;
4965 memset (&id
, 0, sizeof (id
));
4967 id
.src_node
= id
.dst_node
= cgraph_node::get (fn
);
4968 gcc_assert (id
.dst_node
->definition
);
4970 /* Or any functions that aren't finished yet. */
4971 if (current_function_decl
)
4972 id
.dst_fn
= current_function_decl
;
4974 id
.copy_decl
= copy_decl_maybe_to_var
;
4975 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
4976 id
.transform_new_cfg
= false;
4977 id
.transform_return_to_modify
= true;
4978 id
.transform_parameter
= true;
4979 id
.transform_lang_insert_block
= NULL
;
4980 id
.statements_to_fold
= new hash_set
<gimple
*>;
4982 push_gimplify_context ();
4984 /* We make no attempts to keep dominance info up-to-date. */
4985 free_dominance_info (CDI_DOMINATORS
);
4986 free_dominance_info (CDI_POST_DOMINATORS
);
4988 /* Register specific gimple functions. */
4989 gimple_register_cfg_hooks ();
4991 /* Reach the trees by walking over the CFG, and note the
4992 enclosing basic-blocks in the call edges. */
4993 /* We walk the blocks going forward, because inlined function bodies
4994 will split id->current_basic_block, and the new blocks will
4995 follow it; we'll trudge through them, processing their CALL_EXPRs
4997 FOR_EACH_BB_FN (bb
, cfun
)
4998 inlined_p
|= gimple_expand_calls_inline (bb
, &id
);
5000 pop_gimplify_context (NULL
);
5004 struct cgraph_edge
*e
;
5006 id
.dst_node
->verify ();
5008 /* Double check that we inlined everything we are supposed to inline. */
5009 for (e
= id
.dst_node
->callees
; e
; e
= e
->next_callee
)
5010 gcc_assert (e
->inline_failed
);
5013 /* Fold queued statements. */
5014 update_max_bb_count ();
5015 fold_marked_statements (last
, id
.statements_to_fold
);
5016 delete id
.statements_to_fold
;
5018 gcc_assert (!id
.debug_stmts
.exists ());
5020 /* If we didn't inline into the function there is nothing to do. */
5024 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5027 delete_unreachable_blocks_update_callgraph (&id
);
5029 id
.dst_node
->verify ();
5031 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5032 not possible yet - the IPA passes might make various functions to not
5033 throw and they don't care to proactively update local EH info. This is
5034 done later in fixup_cfg pass that also execute the verification. */
5035 return (TODO_update_ssa
5037 | (gimple_in_ssa_p (cfun
) ? TODO_remove_unused_locals
: 0)
5038 | (gimple_in_ssa_p (cfun
) ? TODO_update_address_taken
: 0)
5039 | (profile_status_for_fn (cfun
) != PROFILE_ABSENT
5040 ? TODO_rebuild_frequencies
: 0));
5043 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5046 copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
5048 enum tree_code code
= TREE_CODE (*tp
);
5049 enum tree_code_class cl
= TREE_CODE_CLASS (code
);
5051 /* We make copies of most nodes. */
5052 if (IS_EXPR_CODE_CLASS (cl
)
5053 || code
== TREE_LIST
5055 || code
== TYPE_DECL
5056 || code
== OMP_CLAUSE
)
5058 /* Because the chain gets clobbered when we make a copy, we save it
5060 tree chain
= NULL_TREE
, new_tree
;
5062 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
5063 chain
= TREE_CHAIN (*tp
);
5065 /* Copy the node. */
5066 new_tree
= copy_node (*tp
);
5070 /* Now, restore the chain, if appropriate. That will cause
5071 walk_tree to walk into the chain as well. */
5072 if (code
== PARM_DECL
5073 || code
== TREE_LIST
5074 || code
== OMP_CLAUSE
)
5075 TREE_CHAIN (*tp
) = chain
;
5077 /* For now, we don't update BLOCKs when we make copies. So, we
5078 have to nullify all BIND_EXPRs. */
5079 if (TREE_CODE (*tp
) == BIND_EXPR
)
5080 BIND_EXPR_BLOCK (*tp
) = NULL_TREE
;
5082 else if (code
== CONSTRUCTOR
)
5084 /* CONSTRUCTOR nodes need special handling because
5085 we need to duplicate the vector of elements. */
5088 new_tree
= copy_node (*tp
);
5089 CONSTRUCTOR_ELTS (new_tree
) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp
));
5092 else if (code
== STATEMENT_LIST
)
5093 /* We used to just abort on STATEMENT_LIST, but we can run into them
5094 with statement-expressions (c++/40975). */
5095 copy_statement_list (tp
);
5096 else if (TREE_CODE_CLASS (code
) == tcc_type
)
5098 else if (TREE_CODE_CLASS (code
) == tcc_declaration
)
5100 else if (TREE_CODE_CLASS (code
) == tcc_constant
)
5105 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5106 information indicating to what new SAVE_EXPR this one should be mapped,
5107 use that one. Otherwise, create a new node and enter it in ST. FN is
5108 the function into which the copy will be placed. */
5111 remap_save_expr (tree
*tp
, hash_map
<tree
, tree
> *st
, int *walk_subtrees
)
5116 /* See if we already encountered this SAVE_EXPR. */
5119 /* If we didn't already remap this SAVE_EXPR, do so now. */
5122 t
= copy_node (*tp
);
5124 /* Remember this SAVE_EXPR. */
5126 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5131 /* We've already walked into this SAVE_EXPR; don't do it again. */
5136 /* Replace this SAVE_EXPR with the copy. */
5140 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5141 label, copies the declaration and enters it in the splay_tree in DATA (which
5142 is really a 'copy_body_data *'. */
5145 mark_local_labels_stmt (gimple_stmt_iterator
*gsip
,
5146 bool *handled_ops_p ATTRIBUTE_UNUSED
,
5147 struct walk_stmt_info
*wi
)
5149 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5150 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsip
));
5154 tree decl
= gimple_label_label (stmt
);
5156 /* Copy the decl and remember the copy. */
5157 insert_decl_map (id
, decl
, id
->copy_decl (decl
, id
));
5163 static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq
,
5164 struct walk_stmt_info
*wi
);
5166 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5167 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5168 remaps all local declarations to appropriate replacements in gimple
5172 replace_locals_op (tree
*tp
, int *walk_subtrees
, void *data
)
5174 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
5175 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5176 hash_map
<tree
, tree
> *st
= id
->decl_map
;
5180 /* For recursive invocations this is no longer the LHS itself. */
5181 bool is_lhs
= wi
->is_lhs
;
5184 if (TREE_CODE (expr
) == SSA_NAME
)
5186 *tp
= remap_ssa_name (*tp
, id
);
5189 SSA_NAME_DEF_STMT (*tp
) = gsi_stmt (wi
->gsi
);
5191 /* Only a local declaration (variable or label). */
5192 else if ((VAR_P (expr
) && !TREE_STATIC (expr
))
5193 || TREE_CODE (expr
) == LABEL_DECL
)
5195 /* Lookup the declaration. */
5198 /* If it's there, remap it. */
5203 else if (TREE_CODE (expr
) == STATEMENT_LIST
5204 || TREE_CODE (expr
) == BIND_EXPR
5205 || TREE_CODE (expr
) == SAVE_EXPR
)
5207 else if (TREE_CODE (expr
) == TARGET_EXPR
)
5209 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5210 It's OK for this to happen if it was part of a subtree that
5211 isn't immediately expanded, such as operand 2 of another
5213 if (!TREE_OPERAND (expr
, 1))
5215 TREE_OPERAND (expr
, 1) = TREE_OPERAND (expr
, 3);
5216 TREE_OPERAND (expr
, 3) = NULL_TREE
;
5219 else if (TREE_CODE (expr
) == OMP_CLAUSE
)
5221 /* Before the omplower pass completes, some OMP clauses can contain
5222 sequences that are neither copied by gimple_seq_copy nor walked by
5223 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5224 in those situations, we have to copy and process them explicitely. */
5226 if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_LASTPRIVATE
)
5228 gimple_seq seq
= OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr
);
5229 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5230 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr
) = seq
;
5232 else if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_LINEAR
)
5234 gimple_seq seq
= OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr
);
5235 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5236 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr
) = seq
;
5238 else if (OMP_CLAUSE_CODE (expr
) == OMP_CLAUSE_REDUCTION
)
5240 gimple_seq seq
= OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr
);
5241 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5242 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr
) = seq
;
5243 seq
= OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr
);
5244 seq
= duplicate_remap_omp_clause_seq (seq
, wi
);
5245 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr
) = seq
;
5249 /* Keep iterating. */
5254 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5255 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5256 remaps all local declarations to appropriate replacements in gimple
5260 replace_locals_stmt (gimple_stmt_iterator
*gsip
,
5261 bool *handled_ops_p ATTRIBUTE_UNUSED
,
5262 struct walk_stmt_info
*wi
)
5264 copy_body_data
*id
= (copy_body_data
*) wi
->info
;
5265 gimple
*gs
= gsi_stmt (*gsip
);
5267 if (gbind
*stmt
= dyn_cast
<gbind
*> (gs
))
5269 tree block
= gimple_bind_block (stmt
);
5273 remap_block (&block
, id
);
5274 gimple_bind_set_block (stmt
, block
);
5277 /* This will remap a lot of the same decls again, but this should be
5279 if (gimple_bind_vars (stmt
))
5281 tree old_var
, decls
= gimple_bind_vars (stmt
);
5283 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
5284 if (!can_be_nonlocal (old_var
, id
)
5285 && ! variably_modified_type_p (TREE_TYPE (old_var
), id
->src_fn
))
5286 remap_decl (old_var
, id
);
5288 gcc_checking_assert (!id
->prevent_decl_creation_for_types
);
5289 id
->prevent_decl_creation_for_types
= true;
5290 gimple_bind_set_vars (stmt
, remap_decls (decls
, NULL
, id
));
5291 id
->prevent_decl_creation_for_types
= false;
5295 /* Keep iterating. */
5299 /* Create a copy of SEQ and remap all decls in it. */
5302 duplicate_remap_omp_clause_seq (gimple_seq seq
, struct walk_stmt_info
*wi
)
5307 /* If there are any labels in OMP sequences, they can be only referred to in
5308 the sequence itself and therefore we can do both here. */
5309 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, wi
);
5310 gimple_seq copy
= gimple_seq_copy (seq
);
5311 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, wi
);
5315 /* Copies everything in SEQ and replaces variables and labels local to
5316 current_function_decl. */
5319 copy_gimple_seq_and_replace_locals (gimple_seq seq
)
5322 struct walk_stmt_info wi
;
5325 /* There's nothing to do for NULL_TREE. */
5330 memset (&id
, 0, sizeof (id
));
5331 id
.src_fn
= current_function_decl
;
5332 id
.dst_fn
= current_function_decl
;
5334 id
.decl_map
= new hash_map
<tree
, tree
>;
5335 id
.debug_map
= NULL
;
5337 id
.copy_decl
= copy_decl_no_change
;
5338 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
5339 id
.transform_new_cfg
= false;
5340 id
.transform_return_to_modify
= false;
5341 id
.transform_parameter
= false;
5342 id
.transform_lang_insert_block
= NULL
;
5344 /* Walk the tree once to find local labels. */
5345 memset (&wi
, 0, sizeof (wi
));
5346 hash_set
<tree
> visited
;
5349 walk_gimple_seq (seq
, mark_local_labels_stmt
, NULL
, &wi
);
5351 copy
= gimple_seq_copy (seq
);
5353 /* Walk the copy, remapping decls. */
5354 memset (&wi
, 0, sizeof (wi
));
5356 walk_gimple_seq (copy
, replace_locals_stmt
, replace_locals_op
, &wi
);
5361 delete id
.debug_map
;
5362 if (id
.dependence_map
)
5364 delete id
.dependence_map
;
5365 id
.dependence_map
= NULL
;
5372 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5375 debug_find_tree_1 (tree
*tp
, int *walk_subtrees ATTRIBUTE_UNUSED
, void *data
)
5384 debug_find_tree (tree top
, tree search
)
5386 return walk_tree_without_duplicates (&top
, debug_find_tree_1
, search
) != 0;
5390 /* Declare the variables created by the inliner. Add all the variables in
5391 VARS to BIND_EXPR. */
5394 declare_inline_vars (tree block
, tree vars
)
5397 for (t
= vars
; t
; t
= DECL_CHAIN (t
))
5399 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
5400 gcc_assert (!TREE_STATIC (t
) && !TREE_ASM_WRITTEN (t
));
5401 add_local_decl (cfun
, t
);
5405 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), vars
);
5408 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5409 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5410 VAR_DECL translation. */
5413 copy_decl_for_dup_finish (copy_body_data
*id
, tree decl
, tree copy
)
5415 /* Don't generate debug information for the copy if we wouldn't have
5416 generated it for the copy either. */
5417 DECL_ARTIFICIAL (copy
) = DECL_ARTIFICIAL (decl
);
5418 DECL_IGNORED_P (copy
) = DECL_IGNORED_P (decl
);
5420 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5421 declaration inspired this copy. */
5422 DECL_ABSTRACT_ORIGIN (copy
) = DECL_ORIGIN (decl
);
5424 /* The new variable/label has no RTL, yet. */
5425 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy
), TS_DECL_WRTL
)
5426 && !TREE_STATIC (copy
) && !DECL_EXTERNAL (copy
))
5427 SET_DECL_RTL (copy
, 0);
5429 /* These args would always appear unused, if not for this. */
5430 TREE_USED (copy
) = 1;
5432 /* Set the context for the new declaration. */
5433 if (!DECL_CONTEXT (decl
))
5434 /* Globals stay global. */
5436 else if (DECL_CONTEXT (decl
) != id
->src_fn
)
5437 /* Things that weren't in the scope of the function we're inlining
5438 from aren't in the scope we're inlining to, either. */
5440 else if (TREE_STATIC (decl
))
5441 /* Function-scoped static variables should stay in the original
5446 /* Ordinary automatic local variables are now in the scope of the
5448 DECL_CONTEXT (copy
) = id
->dst_fn
;
5449 if (VAR_P (copy
) && id
->dst_simt_vars
&& !is_gimple_reg (copy
))
5451 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy
)))
5452 DECL_ATTRIBUTES (copy
)
5453 = tree_cons (get_identifier ("omp simt private"), NULL
,
5454 DECL_ATTRIBUTES (copy
));
5455 id
->dst_simt_vars
->safe_push (copy
);
5463 copy_decl_to_var (tree decl
, copy_body_data
*id
)
5467 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5468 || TREE_CODE (decl
) == RESULT_DECL
);
5470 type
= TREE_TYPE (decl
);
5472 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5473 VAR_DECL
, DECL_NAME (decl
), type
);
5474 if (DECL_PT_UID_SET_P (decl
))
5475 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5476 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5477 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5478 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5479 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5481 return copy_decl_for_dup_finish (id
, decl
, copy
);
5484 /* Like copy_decl_to_var, but create a return slot object instead of a
5485 pointer variable for return by invisible reference. */
5488 copy_result_decl_to_var (tree decl
, copy_body_data
*id
)
5492 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5493 || TREE_CODE (decl
) == RESULT_DECL
);
5495 type
= TREE_TYPE (decl
);
5496 if (DECL_BY_REFERENCE (decl
))
5497 type
= TREE_TYPE (type
);
5499 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5500 VAR_DECL
, DECL_NAME (decl
), type
);
5501 if (DECL_PT_UID_SET_P (decl
))
5502 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5503 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5504 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5505 if (!DECL_BY_REFERENCE (decl
))
5507 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5508 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5511 return copy_decl_for_dup_finish (id
, decl
, copy
);
5515 copy_decl_no_change (tree decl
, copy_body_data
*id
)
5519 copy
= copy_node (decl
);
5521 /* The COPY is not abstract; it will be generated in DST_FN. */
5522 DECL_ABSTRACT_P (copy
) = false;
5523 lang_hooks
.dup_lang_specific_decl (copy
);
5525 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5526 been taken; it's for internal bookkeeping in expand_goto_internal. */
5527 if (TREE_CODE (copy
) == LABEL_DECL
)
5529 TREE_ADDRESSABLE (copy
) = 0;
5530 LABEL_DECL_UID (copy
) = -1;
5533 return copy_decl_for_dup_finish (id
, decl
, copy
);
5537 copy_decl_maybe_to_var (tree decl
, copy_body_data
*id
)
5539 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
5540 return copy_decl_to_var (decl
, id
);
5542 return copy_decl_no_change (decl
, id
);
5545 /* Return a copy of the function's argument tree. */
5547 copy_arguments_for_versioning (tree orig_parm
, copy_body_data
* id
,
5548 bitmap args_to_skip
, tree
*vars
)
5551 tree new_parm
= NULL
;
5556 for (arg
= orig_parm
; arg
; arg
= DECL_CHAIN (arg
), i
++)
5557 if (!args_to_skip
|| !bitmap_bit_p (args_to_skip
, i
))
5559 tree new_tree
= remap_decl (arg
, id
);
5560 if (TREE_CODE (new_tree
) != PARM_DECL
)
5561 new_tree
= id
->copy_decl (arg
, id
);
5562 lang_hooks
.dup_lang_specific_decl (new_tree
);
5564 parg
= &DECL_CHAIN (new_tree
);
5566 else if (!id
->decl_map
->get (arg
))
5568 /* Make an equivalent VAR_DECL. If the argument was used
5569 as temporary variable later in function, the uses will be
5570 replaced by local variable. */
5571 tree var
= copy_decl_to_var (arg
, id
);
5572 insert_decl_map (id
, arg
, var
);
5573 /* Declare this new variable. */
5574 DECL_CHAIN (var
) = *vars
;
5580 /* Return a copy of the function's static chain. */
5582 copy_static_chain (tree static_chain
, copy_body_data
* id
)
5584 tree
*chain_copy
, *pvar
;
5586 chain_copy
= &static_chain
;
5587 for (pvar
= chain_copy
; *pvar
; pvar
= &DECL_CHAIN (*pvar
))
5589 tree new_tree
= remap_decl (*pvar
, id
);
5590 lang_hooks
.dup_lang_specific_decl (new_tree
);
5591 DECL_CHAIN (new_tree
) = DECL_CHAIN (*pvar
);
5594 return static_chain
;
5597 /* Return true if the function is allowed to be versioned.
5598 This is a guard for the versioning functionality. */
5601 tree_versionable_function_p (tree fndecl
)
5603 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl
))
5604 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl
)) == NULL
);
5607 /* Delete all unreachable basic blocks and update callgraph.
5608 Doing so is somewhat nontrivial because we need to update all clones and
5609 remove inline function that become unreachable. */
5612 delete_unreachable_blocks_update_callgraph (copy_body_data
*id
)
5614 bool changed
= false;
5615 basic_block b
, next_bb
;
5617 find_unreachable_blocks ();
5619 /* Delete all unreachable basic blocks. */
5621 for (b
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; b
5622 != EXIT_BLOCK_PTR_FOR_FN (cfun
); b
= next_bb
)
5624 next_bb
= b
->next_bb
;
5626 if (!(b
->flags
& BB_REACHABLE
))
5628 gimple_stmt_iterator bsi
;
5630 for (bsi
= gsi_start_bb (b
); !gsi_end_p (bsi
); gsi_next (&bsi
))
5632 struct cgraph_edge
*e
;
5633 struct cgraph_node
*node
;
5635 id
->dst_node
->remove_stmt_references (gsi_stmt (bsi
));
5637 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5638 &&(e
= id
->dst_node
->get_edge (gsi_stmt (bsi
))) != NULL
)
5640 if (!e
->inline_failed
)
5641 e
->callee
->remove_symbol_and_inline_clones (id
->dst_node
);
5645 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
5646 && id
->dst_node
->clones
)
5647 for (node
= id
->dst_node
->clones
; node
!= id
->dst_node
;)
5649 node
->remove_stmt_references (gsi_stmt (bsi
));
5650 if (gimple_code (gsi_stmt (bsi
)) == GIMPLE_CALL
5651 && (e
= node
->get_edge (gsi_stmt (bsi
))) != NULL
)
5653 if (!e
->inline_failed
)
5654 e
->callee
->remove_symbol_and_inline_clones (id
->dst_node
);
5660 node
= node
->clones
;
5661 else if (node
->next_sibling_clone
)
5662 node
= node
->next_sibling_clone
;
5665 while (node
!= id
->dst_node
&& !node
->next_sibling_clone
)
5666 node
= node
->clone_of
;
5667 if (node
!= id
->dst_node
)
5668 node
= node
->next_sibling_clone
;
5672 delete_basic_block (b
);
/* Update clone info after duplication.

   ID describes the copy just performed; ID->dst_node is the node whose
   clone tree must be brought in sync with the new body.  For every clone
   of ID->dst_node, remap the trees recorded in its replace map
   (clone.tree_map) so they refer to the copied body rather than the
   original one.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  /* Nothing to do when the destination node has no clones.  */
  if (!id->dst_node->clones)
    return;
  /* Preorder walk over the clone tree rooted at ID->dst_node: visit a
     node, then its clones, then siblings, climbing back up via clone_of
     when a subtree is exhausted.  The walk terminates when we climb back
     to ID->dst_node itself.  */
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*node->clone.tree_map)[i];
              /* Remap both sides of the replacement through the decl map
                 built for the copy, duplicating any referenced trees.  */
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      /* Advance the preorder walk: children first, then next sibling,
         otherwise climb until a sibling exists or we reach the root.  */
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees. If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.

   Note: on return, cfun has been switched to the new function
   (initialize_cfun pushes it; pop_cfun at the end restores the
   caller's cfun).  */

void
tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bool skip_return, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  struct copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;
  /* Parameters listed here but later replaced by constants get their
     skip bit cleared so debug info can still describe them; starts as an
     alias of ARGS_TO_SKIP and is copied lazily on first modification.  */
  bitmap debug_args_to_skip = args_to_skip;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      /* Clear the flag first: decl_debug_args_insert sets it again.  */
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = vec_safe_copy (*old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  /* When updating clones, existing edges move with them; otherwise the
     edges themselves are moved to the new version.  */
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  /* This pushes the new function as cfun; it stays current for the rest
     of this function and is popped at the end.  */
  initialize_cfun (new_decl, old_decl,
                   new_entry ? new_entry->count : old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
        gimple *init;
        replace_info = (*tree_map)[i];
        if (replace_info->replace_p)
          {
            int parm_num = -1;
            if (!replace_info->old_tree)
              {
                int p = replace_info->parm_num;
                tree parm;
                tree req_type, new_type;

                /* Resolve parm_num to the PARM_DECL it denotes.  */
                for (parm = DECL_ARGUMENTS (old_decl); p;
                     parm = DECL_CHAIN (parm))
                  p--;
                replace_info->old_tree = parm;
                parm_num = replace_info->parm_num;
                req_type = TREE_TYPE (parm);
                new_type = TREE_TYPE (replace_info->new_tree);
                if (!useless_type_conversion_p (req_type, new_type))
                  {
                    if (fold_convertible_p (req_type, replace_info->new_tree))
                      replace_info->new_tree
                        = fold_build1 (NOP_EXPR, req_type,
                                       replace_info->new_tree);
                    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
                      replace_info->new_tree
                        = fold_build1 (VIEW_CONVERT_EXPR, req_type,
                                       replace_info->new_tree);
                    else
                      {
                        /* Incompatible replacement: drop it rather than
                           build an ill-typed body.  */
                        if (dump_file)
                          {
                            fprintf (dump_file, "    const ");
                            print_generic_expr (dump_file,
                                                replace_info->new_tree);
                            fprintf (dump_file,
                                     "  can't be converted to param ");
                            print_generic_expr (dump_file, parm);
                            fprintf (dump_file, "\n");
                          }
                        replace_info->old_tree = NULL;
                      }
                  }
              }
            else
              gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            if (replace_info->old_tree)
              {
                init = setup_one_parameter (&id, replace_info->old_tree,
                                            replace_info->new_tree, id.src_fn,
                                            NULL,
                                            &vars);
                if (init)
                  init_stmts.safe_push (init);
                if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
                  {
                    if (parm_num == -1)
                      {
                        tree parm;
                        int p;
                        for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
                             parm = DECL_CHAIN (parm), p++)
                          if (parm == replace_info->old_tree)
                            {
                              parm_num = p;
                              break;
                            }
                      }
                    if (parm_num != -1)
                      {
                        /* A replaced parameter should still be described
                           in debug info; clear its skip bit, lazily
                           unsharing the bitmap first.  */
                        if (debug_args_to_skip == args_to_skip)
                          {
                            debug_args_to_skip = BITMAP_ALLOC (NULL);
                            bitmap_copy (debug_args_to_skip, args_to_skip);
                          }
                        bitmap_clear_bit (debug_args_to_skip, parm_num);
                      }
                  }
              }
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      /* SKIP_RETURN: replace the result with a void RESULT_DECL.  */
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      /* For a by-reference result with an SSA default def, pre-seed the
         decl map so copy_body reuses the new default def.  */
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun,
                                          DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl));
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }

  /* Set up the destination functions loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the Function's body.  */
  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  update_max_bb_count ();
  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->count = bb->count;
        }
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      /* Record skipped GIMPLE-register parameters as debug args so call
         sites can still describe their values.  */
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
           parm; parm = DECL_CHAIN (parm), i++)
        if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
          {
            tree ddecl;

            if (debug_args == NULL)
              {
                debug_args = decl_debug_args_insert (new_decl);
                len = vec_safe_length (*debug_args);
              }
            ddecl = make_node (DEBUG_EXPR_DECL);
            DECL_ARTIFICIAL (ddecl) = 1;
            TREE_TYPE (ddecl) = TREE_TYPE (parm);
            SET_DECL_MODE (ddecl, DECL_MODE (parm));
            /* Pairs of (origin parm, debug expr) are pushed.  */
            vec_safe_push (*debug_args, DECL_ORIGIN (parm));
            vec_safe_push (*debug_args, ddecl);
          }
      if (debug_args != NULL)
        {
          /* On the callee side, add
             DEBUG D#Y s=> parm
             DEBUG var => D#Y
             stmts to the first bb where var is a VAR_DECL created for the
             optimized away parameter in DECL_INITIAL block.  This hints
             in the debug info that var (whole DECL_ORIGIN is the parm
             PARM_DECL) is optimized away, but could be looked up at the
             call site as value of D#X there.  */
          tree var = vars, vexpr;
          gimple_stmt_iterator cgsi
            = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gimple *def_temp;

          var = vars;
          /* Walk the newly pushed pairs backwards, stopping at the
             entries that pre-existed in the vector (index LEN).  */
          i = vec_safe_length (*debug_args);
          do
            {
              i -= 2;
              while (var != NULL_TREE
                     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
                var = TREE_CHAIN (var);
              if (var == NULL_TREE)
                break;
              vexpr = make_node (DEBUG_EXPR_DECL);
              parm = (**debug_args)[i];
              DECL_ARTIFICIAL (vexpr) = 1;
              TREE_TYPE (vexpr) = TREE_TYPE (parm);
              SET_DECL_MODE (vexpr, DECL_MODE (parm));
              def_temp = gimple_build_debug_bind (var, vexpr, NULL);
              gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
              def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
              gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
            }
          while (i > len);
        }
    }

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
/* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  Returns NULL_TREE
   when the call cannot be inlined here (non-const callee, no saved body,
   or the copied body is not a simple MODIFY_EXPR).  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  Each formal parameter maps directly to the
         corresponding actual argument of EXP.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      /* Turn RETURN_EXPRs into assignments so the body yields a value.  */
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
/* Duplicate a type, fields and all.

   Returns a fresh copy of TYPE produced by remap_type_1 with a
   throw-away decl map; the copy becomes its own canonical type.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  /* Source and destination are both the current function: we are not
     moving the type between functions, only copying it.  */
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  /* The duplicate is structurally distinct from the original, so it is
     its own canonical type.  */
  TYPE_CANONICAL (type) = type;

  return type;
}
/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.

   FN is the function whose saved body is copied; PARMS receives the head
   of a freshly remapped PARM_DECL chain; RESULT receives the remapped
   RESULT_DECL (or NULL_TREE if FN has none).  Returns the unshared copy
   of FN's body.  */

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  /* P walks the tail of the output parameter chain as it is built.  */
  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  /* Unlike inlining, keep RETURN_EXPRs as-is for constexpr evaluation.  */
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}