2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
42 #include "tree-iterator.h"
44 #include "gimple-fold.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
51 #include "tree-into-ssa.h"
57 #include "value-prof.h"
60 #include "stringpool.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
66 /* I'm not real happy about this, but we need to handle gimple and
69 /* Inlining, Cloning, Versioning, Parallelization
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements are adjusted accordingly.
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
83 Versioning: a function body is duplicated and the result is a new
84 function rather than into blocks of an existing function as with
85 inlining. Some parameters will become constants.
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
91 All of these will simultaneously lookup any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
110 o Provide heuristics to clamp inlining of recursive template
114 /* Weights that estimate_num_insns uses to estimate the size of the
117 eni_weights eni_size_weights
;
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
122 eni_weights eni_time_weights
;
126 static tree
declare_return_variable (copy_body_data
*, tree
, tree
,
128 static void remap_block (tree
*, copy_body_data
*);
129 static void copy_bind_expr (tree
*, int *, copy_body_data
*);
130 static void declare_inline_vars (tree
, tree
);
131 static void remap_save_expr (tree
*, hash_map
<tree
, tree
> *, int *);
132 static void prepend_lexical_block (tree current_block
, tree new_block
);
133 static tree
copy_decl_to_var (tree
, copy_body_data
*);
134 static tree
copy_result_decl_to_var (tree
, copy_body_data
*);
135 static tree
copy_decl_maybe_to_var (tree
, copy_body_data
*);
136 static gimple_seq
remap_gimple_stmt (gimple
*, copy_body_data
*);
137 static void insert_init_stmt (copy_body_data
*, basic_block
, gimple
*);
139 /* Insert a tree->tree mapping for ID. Despite the name suggests
140 that the trees should be variables, it is used for more than that. */
143 insert_decl_map (copy_body_data
*id
, tree key
, tree value
)
145 id
->decl_map
->put (key
, value
);
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
150 id
->decl_map
->put (value
, value
);
153 /* Insert a tree->tree mapping for ID. This is only used for
157 insert_debug_decl_map (copy_body_data
*id
, tree key
, tree value
)
159 if (!gimple_in_ssa_p (id
->src_cfun
))
162 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
165 if (!target_for_debug_bind (key
))
168 gcc_assert (TREE_CODE (key
) == PARM_DECL
);
169 gcc_assert (VAR_P (value
));
172 id
->debug_map
= new hash_map
<tree
, tree
>;
174 id
->debug_map
->put (key
, value
);
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
181 static int processing_debug_stmt
= 0;
183 /* Construct new SSA name for old NAME. ID is the inline context. */
186 remap_ssa_name (tree name
, copy_body_data
*id
)
191 gcc_assert (TREE_CODE (name
) == SSA_NAME
);
193 n
= id
->decl_map
->get (name
);
195 return unshare_expr (*n
);
197 if (processing_debug_stmt
)
199 if (SSA_NAME_IS_DEFAULT_DEF (name
)
200 && TREE_CODE (SSA_NAME_VAR (name
)) == PARM_DECL
201 && id
->entry_bb
== NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)))
204 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
206 gimple_stmt_iterator gsi
;
207 tree val
= SSA_NAME_VAR (name
);
209 n
= id
->decl_map
->get (val
);
212 if (TREE_CODE (val
) != PARM_DECL
213 && !(VAR_P (val
) && DECL_ABSTRACT_ORIGIN (val
)))
215 processing_debug_stmt
= -1;
218 n
= id
->decl_map
->get (val
);
219 if (n
&& TREE_CODE (*n
) == DEBUG_EXPR_DECL
)
221 def_temp
= gimple_build_debug_source_bind (vexpr
, val
, NULL
);
222 DECL_ARTIFICIAL (vexpr
) = 1;
223 TREE_TYPE (vexpr
) = TREE_TYPE (name
);
224 SET_DECL_MODE (vexpr
, DECL_MODE (SSA_NAME_VAR (name
)));
225 gsi
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
226 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
227 insert_decl_map (id
, val
, vexpr
);
231 processing_debug_stmt
= -1;
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var
= SSA_NAME_VAR (name
);
238 || (!SSA_NAME_IS_DEFAULT_DEF (name
)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var
)
241 && DECL_ARTIFICIAL (var
)
242 && DECL_IGNORED_P (var
)
243 && !DECL_NAME (var
)))
245 struct ptr_info_def
*pi
;
246 new_tree
= make_ssa_name (remap_type (TREE_TYPE (name
), id
));
247 if (!var
&& SSA_NAME_IDENTIFIER (name
))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree
, SSA_NAME_IDENTIFIER (name
));
249 insert_decl_map (id
, name
, new_tree
);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id
->src_cfun
->gimple_df
254 && id
->src_cfun
->gimple_df
->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name
))
256 && (pi
= SSA_NAME_PTR_INFO (name
))
259 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
265 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
267 new_tree
= remap_decl (var
, id
);
269 /* We might've substituted constant or another SSA_NAME for
272 Replace the SSA name representing RESULT_DECL by variable during
273 inlining: this saves us from need to introduce PHI node in a case
274 return value is just partly initialized. */
275 if ((VAR_P (new_tree
) || TREE_CODE (new_tree
) == PARM_DECL
)
276 && (!SSA_NAME_VAR (name
)
277 || TREE_CODE (SSA_NAME_VAR (name
)) != RESULT_DECL
278 || !id
->transform_return_to_modify
))
280 struct ptr_info_def
*pi
;
281 new_tree
= make_ssa_name (new_tree
);
282 insert_decl_map (id
, name
, new_tree
);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree
)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id
->src_cfun
->gimple_df
287 && id
->src_cfun
->gimple_df
->ipa_pta
288 && POINTER_TYPE_P (TREE_TYPE (name
))
289 && (pi
= SSA_NAME_PTR_INFO (name
))
292 struct ptr_info_def
*new_pi
= get_ptr_info (new_tree
);
295 if (SSA_NAME_IS_DEFAULT_DEF (name
))
297 /* By inlining function having uninitialized variable, we might
298 extend the lifetime (variable might get reused). This cause
299 ICE in the case we end up extending lifetime of SSA name across
300 abnormal edge, but also increase register pressure.
302 We simply initialize all uninitialized vars by 0 except
303 for case we are inlining to very first BB. We can avoid
304 this for all BBs that are not inside strongly connected
305 regions of the CFG, but this is expensive to test. */
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name
)
308 && (!SSA_NAME_VAR (name
)
309 || TREE_CODE (SSA_NAME_VAR (name
)) != PARM_DECL
)
310 && (id
->entry_bb
!= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
),
312 || EDGE_COUNT (id
->entry_bb
->preds
) != 1))
314 gimple_stmt_iterator gsi
= gsi_last_bb (id
->entry_bb
);
316 tree zero
= build_zero_cst (TREE_TYPE (new_tree
));
318 init_stmt
= gimple_build_assign (new_tree
, zero
);
319 gsi_insert_after (&gsi
, init_stmt
, GSI_NEW_STMT
);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree
) = 0;
324 SSA_NAME_DEF_STMT (new_tree
) = gimple_build_nop ();
325 set_ssa_default_def (cfun
, SSA_NAME_VAR (new_tree
), new_tree
);
330 insert_decl_map (id
, name
, new_tree
);
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
337 remap_decl (tree decl
, copy_body_data
*id
)
341 /* We only remap local variables in the current function. */
343 /* See if we have remapped this declaration. */
345 n
= id
->decl_map
->get (decl
);
347 if (!n
&& processing_debug_stmt
)
349 processing_debug_stmt
= -1;
353 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 necessary DECLs have already been remapped and we do not want to duplicate
355 a decl coming from outside of the sequence we are copying. */
357 && id
->prevent_decl_creation_for_types
358 && id
->remapping_type_depth
> 0
359 && (VAR_P (decl
) || TREE_CODE (decl
) == PARM_DECL
))
362 /* If we didn't already have an equivalent for this declaration, create one
366 /* Make a copy of the variable or label. */
367 tree t
= id
->copy_decl (decl
, id
);
369 /* Remember it, so that if we encounter this local entity again
370 we can reuse this copy. Do this early because remap_type may
371 need this decl for TYPE_STUB_DECL. */
372 insert_decl_map (id
, decl
, t
);
377 /* Remap types, if necessary. */
378 TREE_TYPE (t
) = remap_type (TREE_TYPE (t
), id
);
379 if (TREE_CODE (t
) == TYPE_DECL
)
381 DECL_ORIGINAL_TYPE (t
) = remap_type (DECL_ORIGINAL_TYPE (t
), id
);
383 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 is not set on the TYPE_DECL, for example in LTO mode. */
386 if (DECL_ORIGINAL_TYPE (t
) == TREE_TYPE (t
))
388 tree x
= build_variant_type_copy (TREE_TYPE (t
));
389 TYPE_STUB_DECL (x
) = TYPE_STUB_DECL (TREE_TYPE (t
));
390 TYPE_NAME (x
) = TYPE_NAME (TREE_TYPE (t
));
391 DECL_ORIGINAL_TYPE (t
) = x
;
395 /* Remap sizes as necessary. */
396 walk_tree (&DECL_SIZE (t
), copy_tree_body_r
, id
, NULL
);
397 walk_tree (&DECL_SIZE_UNIT (t
), copy_tree_body_r
, id
, NULL
);
399 /* If fields, do likewise for offset and qualifier. */
400 if (TREE_CODE (t
) == FIELD_DECL
)
402 walk_tree (&DECL_FIELD_OFFSET (t
), copy_tree_body_r
, id
, NULL
);
403 if (TREE_CODE (DECL_CONTEXT (t
)) == QUAL_UNION_TYPE
)
404 walk_tree (&DECL_QUALIFIER (t
), copy_tree_body_r
, id
, NULL
);
410 if (id
->do_not_unshare
)
413 return unshare_expr (*n
);
417 remap_type_1 (tree type
, copy_body_data
*id
)
421 /* We do need a copy. build and register it now. If this is a pointer or
422 reference type, remap the designated type and make a new pointer or
424 if (TREE_CODE (type
) == POINTER_TYPE
)
426 new_tree
= build_pointer_type_for_mode (remap_type (TREE_TYPE (type
), id
),
428 TYPE_REF_CAN_ALIAS_ALL (type
));
429 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
430 new_tree
= build_type_attribute_qual_variant (new_tree
,
431 TYPE_ATTRIBUTES (type
),
433 insert_decl_map (id
, type
, new_tree
);
436 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
438 new_tree
= build_reference_type_for_mode (remap_type (TREE_TYPE (type
), id
),
440 TYPE_REF_CAN_ALIAS_ALL (type
));
441 if (TYPE_ATTRIBUTES (type
) || TYPE_QUALS (type
))
442 new_tree
= build_type_attribute_qual_variant (new_tree
,
443 TYPE_ATTRIBUTES (type
),
445 insert_decl_map (id
, type
, new_tree
);
449 new_tree
= copy_node (type
);
451 insert_decl_map (id
, type
, new_tree
);
453 /* This is a new type, not a copy of an old type. Need to reassociate
454 variants. We can handle everything except the main variant lazily. */
455 t
= TYPE_MAIN_VARIANT (type
);
458 t
= remap_type (t
, id
);
459 TYPE_MAIN_VARIANT (new_tree
) = t
;
460 TYPE_NEXT_VARIANT (new_tree
) = TYPE_NEXT_VARIANT (t
);
461 TYPE_NEXT_VARIANT (t
) = new_tree
;
465 TYPE_MAIN_VARIANT (new_tree
) = new_tree
;
466 TYPE_NEXT_VARIANT (new_tree
) = NULL
;
469 if (TYPE_STUB_DECL (type
))
470 TYPE_STUB_DECL (new_tree
) = remap_decl (TYPE_STUB_DECL (type
), id
);
472 /* Lazily create pointer and reference types. */
473 TYPE_POINTER_TO (new_tree
) = NULL
;
474 TYPE_REFERENCE_TO (new_tree
) = NULL
;
476 /* Copy all types that may contain references to local variables; be sure to
477 preserve sharing in between type and its main variant when possible. */
478 switch (TREE_CODE (new_tree
))
482 case FIXED_POINT_TYPE
:
485 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
487 gcc_checking_assert (TYPE_MIN_VALUE (type
) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type
)));
488 gcc_checking_assert (TYPE_MAX_VALUE (type
) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type
)));
490 TYPE_MIN_VALUE (new_tree
) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree
));
491 TYPE_MAX_VALUE (new_tree
) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree
));
495 t
= TYPE_MIN_VALUE (new_tree
);
496 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
497 walk_tree (&TYPE_MIN_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
499 t
= TYPE_MAX_VALUE (new_tree
);
500 if (t
&& TREE_CODE (t
) != INTEGER_CST
)
501 walk_tree (&TYPE_MAX_VALUE (new_tree
), copy_tree_body_r
, id
, NULL
);
506 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
507 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
508 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
510 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
511 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
512 && TYPE_ARG_TYPES (type
) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type
)))
513 TYPE_ARG_TYPES (new_tree
) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree
));
515 walk_tree (&TYPE_ARG_TYPES (new_tree
), copy_tree_body_r
, id
, NULL
);
519 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
520 && TREE_TYPE (type
) == TREE_TYPE (TYPE_MAIN_VARIANT (type
)))
521 TREE_TYPE (new_tree
) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree
));
523 TREE_TYPE (new_tree
) = remap_type (TREE_TYPE (new_tree
), id
);
525 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
527 gcc_checking_assert (TYPE_DOMAIN (type
)
528 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type
)));
529 TYPE_DOMAIN (new_tree
) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree
));
533 TYPE_DOMAIN (new_tree
) = remap_type (TYPE_DOMAIN (new_tree
), id
);
534 /* For array bounds where we have decided not to copy over the bounds
535 variable which isn't used in OpenMP/OpenACC region, change them to
536 an uninitialized VAR_DECL temporary. */
537 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree
)) == error_mark_node
538 && id
->adjust_array_error_bounds
539 && TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) != error_mark_node
)
541 tree v
= create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree
)));
543 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE
,
544 DECL_ATTRIBUTES (v
));
545 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree
)) = v
;
552 case QUAL_UNION_TYPE
:
553 if (TYPE_MAIN_VARIANT (type
) != type
554 && TYPE_FIELDS (type
) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type
)))
555 TYPE_FIELDS (new_tree
) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree
));
560 for (f
= TYPE_FIELDS (new_tree
); f
; f
= DECL_CHAIN (f
))
562 t
= remap_decl (f
, id
);
563 DECL_CONTEXT (t
) = new_tree
;
567 TYPE_FIELDS (new_tree
) = nreverse (nf
);
573 /* Shouldn't have been thought variable sized. */
577 /* All variants of type share the same size, so use the already remaped data. */
578 if (TYPE_MAIN_VARIANT (new_tree
) != new_tree
)
580 tree s
= TYPE_SIZE (type
);
581 tree mvs
= TYPE_SIZE (TYPE_MAIN_VARIANT (type
));
582 tree su
= TYPE_SIZE_UNIT (type
);
583 tree mvsu
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
584 gcc_checking_assert ((TREE_CODE (s
) == PLACEHOLDER_EXPR
585 && (TREE_CODE (mvs
) == PLACEHOLDER_EXPR
))
587 gcc_checking_assert ((TREE_CODE (su
) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvsu
) == PLACEHOLDER_EXPR
))
590 TYPE_SIZE (new_tree
) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree
));
591 TYPE_SIZE_UNIT (new_tree
) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree
));
595 walk_tree (&TYPE_SIZE (new_tree
), copy_tree_body_r
, id
, NULL
);
596 walk_tree (&TYPE_SIZE_UNIT (new_tree
), copy_tree_body_r
, id
, NULL
);
602 /* Helper function for remap_type_2, called through walk_tree. */
605 remap_type_3 (tree
*tp
, int *walk_subtrees
, void *data
)
607 copy_body_data
*id
= (copy_body_data
*) data
;
612 else if (DECL_P (*tp
) && remap_decl (*tp
, id
) != *tp
)
618 /* Return true if TYPE needs to be remapped because remap_decl on any
619 needed embedded decl returns something other than that decl. */
622 remap_type_2 (tree type
, copy_body_data
*id
)
626 #define RETURN_TRUE_IF_VAR(T) \
632 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
634 if (!TYPE_SIZES_GIMPLIFIED (type) \
635 && walk_tree (&_t, remap_type_3, id, NULL)) \
641 switch (TREE_CODE (type
))
647 return remap_type_2 (TREE_TYPE (type
), id
);
651 case FIXED_POINT_TYPE
:
654 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
655 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
659 if (remap_type_2 (TREE_TYPE (type
), id
)
660 || (TYPE_DOMAIN (type
) && remap_type_2 (TYPE_DOMAIN (type
), id
)))
666 case QUAL_UNION_TYPE
:
667 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
668 if (TREE_CODE (t
) == FIELD_DECL
)
670 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
671 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
672 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
673 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
674 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
682 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
683 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
685 #undef RETURN_TRUE_IF_VAR
689 remap_type (tree type
, copy_body_data
*id
)
697 /* See if we have remapped this type. */
698 node
= id
->decl_map
->get (type
);
702 /* The type only needs remapping if it's variably modified. */
703 if (! variably_modified_type_p (type
, id
->src_fn
)
704 /* Don't remap if copy_decl method doesn't always return a new
705 decl and for all embedded decls returns the passed in decl. */
706 || (id
->dont_remap_vla_if_no_change
&& !remap_type_2 (type
, id
)))
708 insert_decl_map (id
, type
, type
);
712 id
->remapping_type_depth
++;
713 tmp
= remap_type_1 (type
, id
);
714 id
->remapping_type_depth
--;
719 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
722 can_be_nonlocal (tree decl
, copy_body_data
*id
)
724 /* We cannot duplicate function decls. */
725 if (TREE_CODE (decl
) == FUNCTION_DECL
)
728 /* Local static vars must be non-local or we get multiple declaration
730 if (VAR_P (decl
) && !auto_var_in_fn_p (decl
, id
->src_fn
))
737 remap_decls (tree decls
, vec
<tree
, va_gc
> **nonlocalized_list
,
741 tree new_decls
= NULL_TREE
;
743 /* Remap its variables. */
744 for (old_var
= decls
; old_var
; old_var
= DECL_CHAIN (old_var
))
748 if (can_be_nonlocal (old_var
, id
))
750 /* We need to add this variable to the local decls as otherwise
751 nothing else will do so. */
752 if (VAR_P (old_var
) && ! DECL_EXTERNAL (old_var
) && cfun
)
753 add_local_decl (cfun
, old_var
);
754 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
755 && !DECL_IGNORED_P (old_var
)
756 && nonlocalized_list
)
757 vec_safe_push (*nonlocalized_list
, old_var
);
761 /* Remap the variable. */
762 new_var
= remap_decl (old_var
, id
);
764 /* If we didn't remap this variable, we can't mess with its
765 TREE_CHAIN. If we remapped this variable to the return slot, it's
766 already declared somewhere else, so don't declare it here. */
768 if (new_var
== id
->retvar
)
772 if ((!optimize
|| debug_info_level
> DINFO_LEVEL_TERSE
)
773 && !DECL_IGNORED_P (old_var
)
774 && nonlocalized_list
)
775 vec_safe_push (*nonlocalized_list
, old_var
);
779 gcc_assert (DECL_P (new_var
));
780 DECL_CHAIN (new_var
) = new_decls
;
783 /* Also copy value-expressions. */
784 if (VAR_P (new_var
) && DECL_HAS_VALUE_EXPR_P (new_var
))
786 tree tem
= DECL_VALUE_EXPR (new_var
);
787 bool old_regimplify
= id
->regimplify
;
788 id
->remapping_type_depth
++;
789 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
790 id
->remapping_type_depth
--;
791 id
->regimplify
= old_regimplify
;
792 SET_DECL_VALUE_EXPR (new_var
, tem
);
797 return nreverse (new_decls
);
800 /* Copy the BLOCK to contain remapped versions of the variables
801 therein. And hook the new block into the block-tree. */
804 remap_block (tree
*block
, copy_body_data
*id
)
809 /* Make the new block. */
811 new_block
= make_node (BLOCK
);
812 TREE_USED (new_block
) = TREE_USED (old_block
);
813 BLOCK_ABSTRACT_ORIGIN (new_block
) = BLOCK_ORIGIN (old_block
);
814 BLOCK_SOURCE_LOCATION (new_block
) = BLOCK_SOURCE_LOCATION (old_block
);
815 BLOCK_NONLOCALIZED_VARS (new_block
)
816 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block
));
819 /* Remap its variables. */
820 BLOCK_VARS (new_block
) = remap_decls (BLOCK_VARS (old_block
),
821 &BLOCK_NONLOCALIZED_VARS (new_block
),
824 if (id
->transform_lang_insert_block
)
825 id
->transform_lang_insert_block (new_block
);
827 /* Remember the remapped block. */
828 insert_decl_map (id
, old_block
, new_block
);
831 /* Copy the whole block tree and root it in id->block. */
834 remap_blocks (tree block
, copy_body_data
*id
)
837 tree new_tree
= block
;
842 remap_block (&new_tree
, id
);
843 gcc_assert (new_tree
!= block
);
844 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
845 prepend_lexical_block (new_tree
, remap_blocks (t
, id
));
846 /* Blocks are in arbitrary order, but make things slightly prettier and do
847 not swap order when producing a copy. */
848 BLOCK_SUBBLOCKS (new_tree
) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree
));
852 /* Remap the block tree rooted at BLOCK to nothing. */
855 remap_blocks_to_null (tree block
, copy_body_data
*id
)
858 insert_decl_map (id
, block
, NULL_TREE
);
859 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
860 remap_blocks_to_null (t
, id
);
863 /* Remap the location info pointed to by LOCUS. */
866 remap_location (location_t locus
, copy_body_data
*id
)
868 if (LOCATION_BLOCK (locus
))
870 tree
*n
= id
->decl_map
->get (LOCATION_BLOCK (locus
));
873 return set_block (locus
, *n
);
876 locus
= LOCATION_LOCUS (locus
);
878 if (locus
!= UNKNOWN_LOCATION
&& id
->block
)
879 return set_block (locus
, id
->block
);
885 copy_statement_list (tree
*tp
)
887 tree_stmt_iterator oi
, ni
;
890 new_tree
= alloc_stmt_list ();
891 ni
= tsi_start (new_tree
);
892 oi
= tsi_start (*tp
);
893 TREE_TYPE (new_tree
) = TREE_TYPE (*tp
);
896 for (; !tsi_end_p (oi
); tsi_next (&oi
))
898 tree stmt
= tsi_stmt (oi
);
899 if (TREE_CODE (stmt
) == STATEMENT_LIST
)
900 /* This copy is not redundant; tsi_link_after will smash this
901 STATEMENT_LIST into the end of the one we're building, and we
902 don't want to do that with the original. */
903 copy_statement_list (&stmt
);
904 tsi_link_after (&ni
, stmt
, TSI_CONTINUE_LINKING
);
909 copy_bind_expr (tree
*tp
, int *walk_subtrees
, copy_body_data
*id
)
911 tree block
= BIND_EXPR_BLOCK (*tp
);
912 /* Copy (and replace) the statement. */
913 copy_tree_r (tp
, walk_subtrees
, NULL
);
916 remap_block (&block
, id
);
917 BIND_EXPR_BLOCK (*tp
) = block
;
920 if (BIND_EXPR_VARS (*tp
))
921 /* This will remap a lot of the same decls again, but this should be
923 BIND_EXPR_VARS (*tp
) = remap_decls (BIND_EXPR_VARS (*tp
), NULL
, id
);
927 /* Create a new gimple_seq by remapping all the statements in BODY
928 using the inlining information in ID. */
931 remap_gimple_seq (gimple_seq body
, copy_body_data
*id
)
933 gimple_stmt_iterator si
;
934 gimple_seq new_body
= NULL
;
936 for (si
= gsi_start (body
); !gsi_end_p (si
); gsi_next (&si
))
938 gimple_seq new_stmts
= remap_gimple_stmt (gsi_stmt (si
), id
);
939 gimple_seq_add_seq (&new_body
, new_stmts
);
946 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
947 block using the mapping information in ID. */
950 copy_gimple_bind (gbind
*stmt
, copy_body_data
*id
)
953 tree new_block
, new_vars
;
954 gimple_seq body
, new_body
;
956 /* Copy the statement. Note that we purposely don't use copy_stmt
957 here because we need to remap statements as we copy. */
958 body
= gimple_bind_body (stmt
);
959 new_body
= remap_gimple_seq (body
, id
);
961 new_block
= gimple_bind_block (stmt
);
963 remap_block (&new_block
, id
);
965 /* This will remap a lot of the same decls again, but this should be
967 new_vars
= gimple_bind_vars (stmt
);
969 new_vars
= remap_decls (new_vars
, NULL
, id
);
971 new_bind
= gimple_build_bind (new_vars
, new_body
, new_block
);
976 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
981 if (TREE_CODE (decl
) == SSA_NAME
)
983 decl
= SSA_NAME_VAR (decl
);
988 return (TREE_CODE (decl
) == PARM_DECL
);
991 /* Remap the dependence CLIQUE from the source to the destination function
992 as specified in ID. */
994 static unsigned short
995 remap_dependence_clique (copy_body_data
*id
, unsigned short clique
)
997 if (clique
== 0 || processing_debug_stmt
)
999 if (!id
->dependence_map
)
1000 id
->dependence_map
= new hash_map
<dependence_hash
, unsigned short>;
1002 unsigned short &newc
= id
->dependence_map
->get_or_insert (clique
, &existed
);
1005 /* Clique 1 is reserved for local ones set by PTA. */
1006 if (cfun
->last_clique
== 0)
1007 cfun
->last_clique
= 1;
1008 newc
= ++cfun
->last_clique
;
1013 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1014 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1015 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1016 recursing into the children nodes of *TP. */
1019 remap_gimple_op_r (tree
*tp
, int *walk_subtrees
, void *data
)
1021 struct walk_stmt_info
*wi_p
= (struct walk_stmt_info
*) data
;
1022 copy_body_data
*id
= (copy_body_data
*) wi_p
->info
;
1023 tree fn
= id
->src_fn
;
1025 /* For recursive invocations this is no longer the LHS itself. */
1026 bool is_lhs
= wi_p
->is_lhs
;
1027 wi_p
->is_lhs
= false;
1029 if (TREE_CODE (*tp
) == SSA_NAME
)
1031 *tp
= remap_ssa_name (*tp
, id
);
1034 SSA_NAME_DEF_STMT (*tp
) = wi_p
->stmt
;
1037 else if (auto_var_in_fn_p (*tp
, fn
))
1039 /* Local variables and labels need to be replaced by equivalent
1040 variables. We don't want to copy static variables; there's
1041 only one of those, no matter how many times we inline the
1042 containing function. Similarly for globals from an outer
1046 /* Remap the declaration. */
1047 new_decl
= remap_decl (*tp
, id
);
1048 gcc_assert (new_decl
);
1049 /* Replace this variable with the copy. */
1050 STRIP_TYPE_NOPS (new_decl
);
1051 /* ??? The C++ frontend uses void * pointer zero to initialize
1052 any other type. This confuses the middle-end type verification.
1053 As cloned bodies do not go through gimplification again the fixup
1054 there doesn't trigger. */
1055 if (TREE_CODE (new_decl
) == INTEGER_CST
1056 && !useless_type_conversion_p (TREE_TYPE (*tp
), TREE_TYPE (new_decl
)))
1057 new_decl
= fold_convert (TREE_TYPE (*tp
), new_decl
);
1061 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1063 else if (TREE_CODE (*tp
) == SAVE_EXPR
)
1065 else if (TREE_CODE (*tp
) == LABEL_DECL
1066 && (!DECL_CONTEXT (*tp
)
1067 || decl_function_context (*tp
) == id
->src_fn
))
1068 /* These may need to be remapped for EH handling. */
1069 *tp
= remap_decl (*tp
, id
);
1070 else if (TREE_CODE (*tp
) == FIELD_DECL
)
1072 /* If the enclosing record type is variably_modified_type_p, the field
1073 has already been remapped. Otherwise, it need not be. */
1074 tree
*n
= id
->decl_map
->get (*tp
);
1079 else if (TYPE_P (*tp
))
1080 /* Types may need remapping as well. */
1081 *tp
= remap_type (*tp
, id
);
1082 else if (CONSTANT_CLASS_P (*tp
))
1084 /* If this is a constant, we have to copy the node iff the type
1085 will be remapped. copy_tree_r will not copy a constant. */
1086 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1088 if (new_type
== TREE_TYPE (*tp
))
1091 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1092 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
1095 *tp
= copy_node (*tp
);
1096 TREE_TYPE (*tp
) = new_type
;
1101 /* Otherwise, just copy the node. Note that copy_tree_r already
1102 knows not to copy VAR_DECLs, etc., so this is safe. */
1104 if (TREE_CODE (*tp
) == MEM_REF
&& !id
->do_not_fold
)
1106 /* We need to re-canonicalize MEM_REFs from inline substitutions
1107 that can happen when a pointer argument is an ADDR_EXPR.
1108 Recurse here manually to allow that. */
1109 tree ptr
= TREE_OPERAND (*tp
, 0);
1110 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1112 walk_tree (&ptr
, remap_gimple_op_r
, data
, NULL
);
1113 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1114 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1115 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1116 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1117 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1119 MR_DEPENDENCE_CLIQUE (*tp
)
1120 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1121 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1123 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1124 remapped a parameter as the property might be valid only
1125 for the parameter itself. */
1126 if (TREE_THIS_NOTRAP (old
)
1127 && (!is_parm (TREE_OPERAND (old
, 0))
1128 || (!id
->transform_parameter
&& is_parm (ptr
))))
1129 TREE_THIS_NOTRAP (*tp
) = 1;
1130 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1135 /* Here is the "usual case". Copy this tree node, and then
1136 tweak some special cases. */
1137 copy_tree_r (tp
, walk_subtrees
, NULL
);
1139 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1140 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1142 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1144 /* The copied TARGET_EXPR has never been expanded, even if the
1145 original node was expanded already. */
1146 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1147 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1149 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1151 /* Variable substitution need not be simple. In particular,
1152 the MEM_REF substitution above. Make sure that
1153 TREE_CONSTANT and friends are up-to-date. */
1154 int invariant
= is_gimple_min_invariant (*tp
);
1155 walk_tree (&TREE_OPERAND (*tp
, 0), remap_gimple_op_r
, data
, NULL
);
1156 recompute_tree_invariant_for_addr_expr (*tp
);
1158 /* If this used to be invariant, but is not any longer,
1159 then regimplification is probably needed. */
1160 if (invariant
&& !is_gimple_min_invariant (*tp
))
1161 id
->regimplify
= true;
1167 /* Update the TREE_BLOCK for the cloned expr. */
1170 tree new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1171 tree old_block
= TREE_BLOCK (*tp
);
1175 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1179 TREE_SET_BLOCK (*tp
, new_block
);
1182 /* Keep iterating. */
1187 /* Called from copy_body_id via walk_tree. DATA is really a
1188 `copy_body_data *'. */
1191 copy_tree_body_r (tree
*tp
, int *walk_subtrees
, void *data
)
1193 copy_body_data
*id
= (copy_body_data
*) data
;
1194 tree fn
= id
->src_fn
;
1197 /* Begin by recognizing trees that we'll completely rewrite for the
1198 inlining context. Our output for these trees is completely
1199 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1200 into an edge). Further down, we'll handle trees that get
1201 duplicated and/or tweaked. */
1203 /* When requested, RETURN_EXPRs should be transformed to just the
1204 contained MODIFY_EXPR. The branch semantics of the return will
1205 be handled elsewhere by manipulating the CFG rather than a statement. */
1206 if (TREE_CODE (*tp
) == RETURN_EXPR
&& id
->transform_return_to_modify
)
1208 tree assignment
= TREE_OPERAND (*tp
, 0);
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If the "assignment" is just the result decl, the result
1213 decl has already been set (e.g. a recent "foo (&result_decl,
1214 ...)"); just toss the entire RETURN_EXPR. */
1215 if (assignment
&& TREE_CODE (assignment
) == MODIFY_EXPR
)
1217 /* Replace the RETURN_EXPR with (a copy of) the
1218 MODIFY_EXPR hanging underneath. */
1219 *tp
= copy_node (assignment
);
1221 else /* Else the RETURN_EXPR returns no value. */
1224 return (tree
) (void *)1;
1227 else if (TREE_CODE (*tp
) == SSA_NAME
)
1229 *tp
= remap_ssa_name (*tp
, id
);
1234 /* Local variables and labels need to be replaced by equivalent
1235 variables. We don't want to copy static variables; there's only
1236 one of those, no matter how many times we inline the containing
1237 function. Similarly for globals from an outer function. */
1238 else if (auto_var_in_fn_p (*tp
, fn
))
1242 /* Remap the declaration. */
1243 new_decl
= remap_decl (*tp
, id
);
1244 gcc_assert (new_decl
);
1245 /* Replace this variable with the copy. */
1246 STRIP_TYPE_NOPS (new_decl
);
1250 else if (TREE_CODE (*tp
) == STATEMENT_LIST
)
1251 copy_statement_list (tp
);
1252 else if (TREE_CODE (*tp
) == SAVE_EXPR
1253 || TREE_CODE (*tp
) == TARGET_EXPR
)
1254 remap_save_expr (tp
, id
->decl_map
, walk_subtrees
);
1255 else if (TREE_CODE (*tp
) == LABEL_DECL
1256 && (! DECL_CONTEXT (*tp
)
1257 || decl_function_context (*tp
) == id
->src_fn
))
1258 /* These may need to be remapped for EH handling. */
1259 *tp
= remap_decl (*tp
, id
);
1260 else if (TREE_CODE (*tp
) == BIND_EXPR
)
1261 copy_bind_expr (tp
, walk_subtrees
, id
);
1262 /* Types may need remapping as well. */
1263 else if (TYPE_P (*tp
))
1264 *tp
= remap_type (*tp
, id
);
1266 /* If this is a constant, we have to copy the node iff the type will be
1267 remapped. copy_tree_r will not copy a constant. */
1268 else if (CONSTANT_CLASS_P (*tp
))
1270 tree new_type
= remap_type (TREE_TYPE (*tp
), id
);
1272 if (new_type
== TREE_TYPE (*tp
))
1275 else if (TREE_CODE (*tp
) == INTEGER_CST
)
1276 *tp
= wide_int_to_tree (new_type
, wi::to_wide (*tp
));
1279 *tp
= copy_node (*tp
);
1280 TREE_TYPE (*tp
) = new_type
;
1284 /* Otherwise, just copy the node. Note that copy_tree_r already
1285 knows not to copy VAR_DECLs, etc., so this is safe. */
1288 /* Here we handle trees that are not completely rewritten.
1289 First we detect some inlining-induced bogosities for
1291 if (TREE_CODE (*tp
) == MODIFY_EXPR
1292 && TREE_OPERAND (*tp
, 0) == TREE_OPERAND (*tp
, 1)
1293 && (auto_var_in_fn_p (TREE_OPERAND (*tp
, 0), fn
)))
1295 /* Some assignments VAR = VAR; don't generate any rtl code
1296 and thus don't count as variable modification. Avoid
1297 keeping bogosities like 0 = 0. */
1298 tree decl
= TREE_OPERAND (*tp
, 0), value
;
1301 n
= id
->decl_map
->get (decl
);
1305 STRIP_TYPE_NOPS (value
);
1306 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1308 *tp
= build_empty_stmt (EXPR_LOCATION (*tp
));
1309 return copy_tree_body_r (tp
, walk_subtrees
, data
);
1313 else if (TREE_CODE (*tp
) == INDIRECT_REF
)
1315 /* Get rid of *& from inline substitutions that can happen when a
1316 pointer argument is an ADDR_EXPR. */
1317 tree decl
= TREE_OPERAND (*tp
, 0);
1318 tree
*n
= id
->decl_map
->get (decl
);
1321 /* If we happen to get an ADDR_EXPR in n->value, strip
1322 it manually here as we'll eventually get ADDR_EXPRs
1323 which lie about their types pointed to. In this case
1324 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1325 but we absolutely rely on that. As fold_indirect_ref
1326 does other useful transformations, try that first, though. */
1327 tree type
= TREE_TYPE (*tp
);
1328 tree ptr
= id
->do_not_unshare
? *n
: unshare_expr (*n
);
1330 *tp
= id
->do_not_fold
? NULL
: gimple_fold_indirect_ref (ptr
);
1333 type
= remap_type (type
, id
);
1334 if (TREE_CODE (ptr
) == ADDR_EXPR
&& !id
->do_not_fold
)
1337 = fold_indirect_ref_1 (EXPR_LOCATION (ptr
), type
, ptr
);
1338 /* ??? We should either assert here or build
1339 a VIEW_CONVERT_EXPR instead of blindly leaking
1340 incompatible types to our IL. */
1342 *tp
= TREE_OPERAND (ptr
, 0);
1346 *tp
= build1 (INDIRECT_REF
, type
, ptr
);
1347 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1348 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1349 TREE_READONLY (*tp
) = TREE_READONLY (old
);
1350 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1351 have remapped a parameter as the property might be
1352 valid only for the parameter itself. */
1353 if (TREE_THIS_NOTRAP (old
)
1354 && (!is_parm (TREE_OPERAND (old
, 0))
1355 || (!id
->transform_parameter
&& is_parm (ptr
))))
1356 TREE_THIS_NOTRAP (*tp
) = 1;
1363 else if (TREE_CODE (*tp
) == MEM_REF
&& !id
->do_not_fold
)
1365 /* We need to re-canonicalize MEM_REFs from inline substitutions
1366 that can happen when a pointer argument is an ADDR_EXPR.
1367 Recurse here manually to allow that. */
1368 tree ptr
= TREE_OPERAND (*tp
, 0);
1369 tree type
= remap_type (TREE_TYPE (*tp
), id
);
1371 walk_tree (&ptr
, copy_tree_body_r
, data
, NULL
);
1372 *tp
= fold_build2 (MEM_REF
, type
, ptr
, TREE_OPERAND (*tp
, 1));
1373 TREE_THIS_VOLATILE (*tp
) = TREE_THIS_VOLATILE (old
);
1374 TREE_SIDE_EFFECTS (*tp
) = TREE_SIDE_EFFECTS (old
);
1375 TREE_NO_WARNING (*tp
) = TREE_NO_WARNING (old
);
1376 if (MR_DEPENDENCE_CLIQUE (old
) != 0)
1378 MR_DEPENDENCE_CLIQUE (*tp
)
1379 = remap_dependence_clique (id
, MR_DEPENDENCE_CLIQUE (old
));
1380 MR_DEPENDENCE_BASE (*tp
) = MR_DEPENDENCE_BASE (old
);
1382 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1383 remapped a parameter as the property might be valid only
1384 for the parameter itself. */
1385 if (TREE_THIS_NOTRAP (old
)
1386 && (!is_parm (TREE_OPERAND (old
, 0))
1387 || (!id
->transform_parameter
&& is_parm (ptr
))))
1388 TREE_THIS_NOTRAP (*tp
) = 1;
1389 REF_REVERSE_STORAGE_ORDER (*tp
) = REF_REVERSE_STORAGE_ORDER (old
);
1394 /* Here is the "usual case". Copy this tree node, and then
1395 tweak some special cases. */
1396 copy_tree_r (tp
, walk_subtrees
, NULL
);
1398 /* If EXPR has block defined, map it to newly constructed block.
1399 When inlining we want EXPRs without block appear in the block
1400 of function call if we are not remapping a type. */
1403 new_block
= id
->remapping_type_depth
== 0 ? id
->block
: NULL
;
1404 if (TREE_BLOCK (*tp
))
1407 n
= id
->decl_map
->get (TREE_BLOCK (*tp
));
1411 TREE_SET_BLOCK (*tp
, new_block
);
1414 if (TREE_CODE (*tp
) != OMP_CLAUSE
)
1415 TREE_TYPE (*tp
) = remap_type (TREE_TYPE (*tp
), id
);
1417 /* The copied TARGET_EXPR has never been expanded, even if the
1418 original node was expanded already. */
1419 if (TREE_CODE (*tp
) == TARGET_EXPR
&& TREE_OPERAND (*tp
, 3))
1421 TREE_OPERAND (*tp
, 1) = TREE_OPERAND (*tp
, 3);
1422 TREE_OPERAND (*tp
, 3) = NULL_TREE
;
1425 /* Variable substitution need not be simple. In particular, the
1426 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1427 and friends are up-to-date. */
1428 else if (TREE_CODE (*tp
) == ADDR_EXPR
)
1430 int invariant
= is_gimple_min_invariant (*tp
);
1431 walk_tree (&TREE_OPERAND (*tp
, 0), copy_tree_body_r
, id
, NULL
);
1433 /* Handle the case where we substituted an INDIRECT_REF
1434 into the operand of the ADDR_EXPR. */
1435 if (TREE_CODE (TREE_OPERAND (*tp
, 0)) == INDIRECT_REF
1436 && !id
->do_not_fold
)
1438 tree t
= TREE_OPERAND (TREE_OPERAND (*tp
, 0), 0);
1439 if (TREE_TYPE (t
) != TREE_TYPE (*tp
))
1440 t
= fold_convert (remap_type (TREE_TYPE (*tp
), id
), t
);
1444 recompute_tree_invariant_for_addr_expr (*tp
);
1446 /* If this used to be invariant, but is not any longer,
1447 then regimplification is probably needed. */
1448 if (invariant
&& !is_gimple_min_invariant (*tp
))
1449 id
->regimplify
= true;
1455 /* Keep iterating. */
1459 /* Helper for remap_gimple_stmt. Given an EH region number for the
1460 source function, map that to the duplicate EH region number in
1461 the destination function. */
1464 remap_eh_region_nr (int old_nr
, copy_body_data
*id
)
1466 eh_region old_r
, new_r
;
1468 old_r
= get_eh_region_from_number_fn (id
->src_cfun
, old_nr
);
1469 new_r
= static_cast<eh_region
> (*id
->eh_map
->get (old_r
));
1471 return new_r
->index
;
1474 /* Similar, but operate on INTEGER_CSTs. */
1477 remap_eh_region_tree_nr (tree old_t_nr
, copy_body_data
*id
)
1481 old_nr
= tree_to_shwi (old_t_nr
);
1482 new_nr
= remap_eh_region_nr (old_nr
, id
);
1484 return build_int_cst (integer_type_node
, new_nr
);
1487 /* Helper for copy_bb. Remap statement STMT using the inlining
1488 information in ID. Return the new statement copy. */
1491 remap_gimple_stmt (gimple
*stmt
, copy_body_data
*id
)
1493 gimple
*copy
= NULL
;
1494 struct walk_stmt_info wi
;
1495 bool skip_first
= false;
1496 gimple_seq stmts
= NULL
;
1498 if (is_gimple_debug (stmt
)
1499 && (gimple_debug_nonbind_marker_p (stmt
)
1500 ? !DECL_STRUCT_FUNCTION (id
->dst_fn
)->debug_nonbind_markers
1501 : !opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
)))
1504 /* Begin by recognizing trees that we'll completely rewrite for the
1505 inlining context. Our output for these trees is completely
1506 different from our input (e.g. RETURN_EXPR is deleted and morphs
1507 into an edge). Further down, we'll handle trees that get
1508 duplicated and/or tweaked. */
1510 /* When requested, GIMPLE_RETURN should be transformed to just the
1511 contained GIMPLE_ASSIGN. The branch semantics of the return will
1512 be handled elsewhere by manipulating the CFG rather than the
1514 if (gimple_code (stmt
) == GIMPLE_RETURN
&& id
->transform_return_to_modify
)
1516 tree retval
= gimple_return_retval (as_a
<greturn
*> (stmt
));
1518 /* If we're returning something, just turn that into an
1519 assignment to the equivalent of the original RESULT_DECL.
1520 If RETVAL is just the result decl, the result decl has
1521 already been set (e.g. a recent "foo (&result_decl, ...)");
1522 just toss the entire GIMPLE_RETURN. */
1524 && (TREE_CODE (retval
) != RESULT_DECL
1525 && (TREE_CODE (retval
) != SSA_NAME
1526 || ! SSA_NAME_VAR (retval
)
1527 || TREE_CODE (SSA_NAME_VAR (retval
)) != RESULT_DECL
)))
1529 copy
= gimple_build_assign (id
->do_not_unshare
1530 ? id
->retvar
: unshare_expr (id
->retvar
),
1532 /* id->retvar is already substituted. Skip it on later remapping. */
1538 else if (gimple_has_substatements (stmt
))
1542 /* When cloning bodies from the C++ front end, we will be handed bodies
1543 in High GIMPLE form. Handle here all the High GIMPLE statements that
1544 have embedded statements. */
1545 switch (gimple_code (stmt
))
1548 copy
= copy_gimple_bind (as_a
<gbind
*> (stmt
), id
);
1553 gcatch
*catch_stmt
= as_a
<gcatch
*> (stmt
);
1554 s1
= remap_gimple_seq (gimple_catch_handler (catch_stmt
), id
);
1555 copy
= gimple_build_catch (gimple_catch_types (catch_stmt
), s1
);
1559 case GIMPLE_EH_FILTER
:
1560 s1
= remap_gimple_seq (gimple_eh_filter_failure (stmt
), id
);
1561 copy
= gimple_build_eh_filter (gimple_eh_filter_types (stmt
), s1
);
1565 s1
= remap_gimple_seq (gimple_try_eval (stmt
), id
);
1566 s2
= remap_gimple_seq (gimple_try_cleanup (stmt
), id
);
1567 copy
= gimple_build_try (s1
, s2
, gimple_try_kind (stmt
));
1570 case GIMPLE_WITH_CLEANUP_EXPR
:
1571 s1
= remap_gimple_seq (gimple_wce_cleanup (stmt
), id
);
1572 copy
= gimple_build_wce (s1
);
1575 case GIMPLE_OMP_PARALLEL
:
1577 gomp_parallel
*omp_par_stmt
= as_a
<gomp_parallel
*> (stmt
);
1578 s1
= remap_gimple_seq (gimple_omp_body (omp_par_stmt
), id
);
1579 copy
= gimple_build_omp_parallel
1581 gimple_omp_parallel_clauses (omp_par_stmt
),
1582 gimple_omp_parallel_child_fn (omp_par_stmt
),
1583 gimple_omp_parallel_data_arg (omp_par_stmt
));
1587 case GIMPLE_OMP_TASK
:
1588 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1589 copy
= gimple_build_omp_task
1591 gimple_omp_task_clauses (stmt
),
1592 gimple_omp_task_child_fn (stmt
),
1593 gimple_omp_task_data_arg (stmt
),
1594 gimple_omp_task_copy_fn (stmt
),
1595 gimple_omp_task_arg_size (stmt
),
1596 gimple_omp_task_arg_align (stmt
));
1599 case GIMPLE_OMP_FOR
:
1600 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1601 s2
= remap_gimple_seq (gimple_omp_for_pre_body (stmt
), id
);
1602 copy
= gimple_build_omp_for (s1
, gimple_omp_for_kind (stmt
),
1603 gimple_omp_for_clauses (stmt
),
1604 gimple_omp_for_collapse (stmt
), s2
);
1607 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1609 gimple_omp_for_set_index (copy
, i
,
1610 gimple_omp_for_index (stmt
, i
));
1611 gimple_omp_for_set_initial (copy
, i
,
1612 gimple_omp_for_initial (stmt
, i
));
1613 gimple_omp_for_set_final (copy
, i
,
1614 gimple_omp_for_final (stmt
, i
));
1615 gimple_omp_for_set_incr (copy
, i
,
1616 gimple_omp_for_incr (stmt
, i
));
1617 gimple_omp_for_set_cond (copy
, i
,
1618 gimple_omp_for_cond (stmt
, i
));
1623 case GIMPLE_OMP_MASTER
:
1624 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1625 copy
= gimple_build_omp_master (s1
);
1628 case GIMPLE_OMP_TASKGROUP
:
1629 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1630 copy
= gimple_build_omp_taskgroup
1631 (s1
, gimple_omp_taskgroup_clauses (stmt
));
1634 case GIMPLE_OMP_ORDERED
:
1635 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1636 copy
= gimple_build_omp_ordered
1638 gimple_omp_ordered_clauses (as_a
<gomp_ordered
*> (stmt
)));
1641 case GIMPLE_OMP_SCAN
:
1642 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1643 copy
= gimple_build_omp_scan
1644 (s1
, gimple_omp_scan_clauses (as_a
<gomp_scan
*> (stmt
)));
1647 case GIMPLE_OMP_SECTION
:
1648 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1649 copy
= gimple_build_omp_section (s1
);
1652 case GIMPLE_OMP_SECTIONS
:
1653 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1654 copy
= gimple_build_omp_sections
1655 (s1
, gimple_omp_sections_clauses (stmt
));
1658 case GIMPLE_OMP_SINGLE
:
1659 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1660 copy
= gimple_build_omp_single
1661 (s1
, gimple_omp_single_clauses (stmt
));
1664 case GIMPLE_OMP_TARGET
:
1665 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1666 copy
= gimple_build_omp_target
1667 (s1
, gimple_omp_target_kind (stmt
),
1668 gimple_omp_target_clauses (stmt
));
1671 case GIMPLE_OMP_TEAMS
:
1672 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1673 copy
= gimple_build_omp_teams
1674 (s1
, gimple_omp_teams_clauses (stmt
));
1677 case GIMPLE_OMP_CRITICAL
:
1678 s1
= remap_gimple_seq (gimple_omp_body (stmt
), id
);
1679 copy
= gimple_build_omp_critical (s1
,
1680 gimple_omp_critical_name
1681 (as_a
<gomp_critical
*> (stmt
)),
1682 gimple_omp_critical_clauses
1683 (as_a
<gomp_critical
*> (stmt
)));
1686 case GIMPLE_TRANSACTION
:
1688 gtransaction
*old_trans_stmt
= as_a
<gtransaction
*> (stmt
);
1689 gtransaction
*new_trans_stmt
;
1690 s1
= remap_gimple_seq (gimple_transaction_body (old_trans_stmt
),
1692 copy
= new_trans_stmt
= gimple_build_transaction (s1
);
1693 gimple_transaction_set_subcode (new_trans_stmt
,
1694 gimple_transaction_subcode (old_trans_stmt
));
1695 gimple_transaction_set_label_norm (new_trans_stmt
,
1696 gimple_transaction_label_norm (old_trans_stmt
));
1697 gimple_transaction_set_label_uninst (new_trans_stmt
,
1698 gimple_transaction_label_uninst (old_trans_stmt
));
1699 gimple_transaction_set_label_over (new_trans_stmt
,
1700 gimple_transaction_label_over (old_trans_stmt
));
1710 if (gimple_assign_copy_p (stmt
)
1711 && gimple_assign_lhs (stmt
) == gimple_assign_rhs1 (stmt
)
1712 && auto_var_in_fn_p (gimple_assign_lhs (stmt
), id
->src_fn
))
1714 /* Here we handle statements that are not completely rewritten.
1715 First we detect some inlining-induced bogosities for
1718 /* Some assignments VAR = VAR; don't generate any rtl code
1719 and thus don't count as variable modification. Avoid
1720 keeping bogosities like 0 = 0. */
1721 tree decl
= gimple_assign_lhs (stmt
), value
;
1724 n
= id
->decl_map
->get (decl
);
1728 STRIP_TYPE_NOPS (value
);
1729 if (TREE_CONSTANT (value
) || TREE_READONLY (value
))
1734 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1735 in a block that we aren't copying during tree_function_versioning,
1736 just drop the clobber stmt. */
1737 if (id
->blocks_to_copy
&& gimple_clobber_p (stmt
))
1739 tree lhs
= gimple_assign_lhs (stmt
);
1740 if (TREE_CODE (lhs
) == MEM_REF
1741 && TREE_CODE (TREE_OPERAND (lhs
, 0)) == SSA_NAME
)
1743 gimple
*def_stmt
= SSA_NAME_DEF_STMT (TREE_OPERAND (lhs
, 0));
1744 if (gimple_bb (def_stmt
)
1745 && !bitmap_bit_p (id
->blocks_to_copy
,
1746 gimple_bb (def_stmt
)->index
))
1751 if (gimple_debug_bind_p (stmt
))
1754 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt
),
1755 gimple_debug_bind_get_value (stmt
),
1757 if (id
->reset_location
)
1758 gimple_set_location (copy
, input_location
);
1759 id
->debug_stmts
.safe_push (copy
);
1760 gimple_seq_add_stmt (&stmts
, copy
);
1763 if (gimple_debug_source_bind_p (stmt
))
1765 gdebug
*copy
= gimple_build_debug_source_bind
1766 (gimple_debug_source_bind_get_var (stmt
),
1767 gimple_debug_source_bind_get_value (stmt
),
1769 if (id
->reset_location
)
1770 gimple_set_location (copy
, input_location
);
1771 id
->debug_stmts
.safe_push (copy
);
1772 gimple_seq_add_stmt (&stmts
, copy
);
1775 if (gimple_debug_nonbind_marker_p (stmt
))
1777 /* If the inlined function has too many debug markers,
1779 if (id
->src_cfun
->debug_marker_count
1780 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT
))
1783 gdebug
*copy
= as_a
<gdebug
*> (gimple_copy (stmt
));
1784 if (id
->reset_location
)
1785 gimple_set_location (copy
, input_location
);
1786 id
->debug_stmts
.safe_push (copy
);
1787 gimple_seq_add_stmt (&stmts
, copy
);
1791 /* Create a new deep copy of the statement. */
1792 copy
= gimple_copy (stmt
);
1794 /* Clear flags that need revisiting. */
1795 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (copy
))
1797 if (gimple_call_tail_p (call_stmt
))
1798 gimple_call_set_tail (call_stmt
, false);
1799 if (gimple_call_from_thunk_p (call_stmt
))
1800 gimple_call_set_from_thunk (call_stmt
, false);
1801 if (gimple_call_internal_p (call_stmt
))
1802 switch (gimple_call_internal_fn (call_stmt
))
1804 case IFN_GOMP_SIMD_LANE
:
1805 case IFN_GOMP_SIMD_VF
:
1806 case IFN_GOMP_SIMD_LAST_LANE
:
1807 case IFN_GOMP_SIMD_ORDERED_START
:
1808 case IFN_GOMP_SIMD_ORDERED_END
:
1809 DECL_STRUCT_FUNCTION (id
->dst_fn
)->has_simduid_loops
= true;
1816 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1817 RESX and EH_DISPATCH. */
1819 switch (gimple_code (copy
))
1823 tree r
, fndecl
= gimple_call_fndecl (copy
);
1824 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
1825 switch (DECL_FUNCTION_CODE (fndecl
))
1827 case BUILT_IN_EH_COPY_VALUES
:
1828 r
= gimple_call_arg (copy
, 1);
1829 r
= remap_eh_region_tree_nr (r
, id
);
1830 gimple_call_set_arg (copy
, 1, r
);
1833 case BUILT_IN_EH_POINTER
:
1834 case BUILT_IN_EH_FILTER
:
1835 r
= gimple_call_arg (copy
, 0);
1836 r
= remap_eh_region_tree_nr (r
, id
);
1837 gimple_call_set_arg (copy
, 0, r
);
1844 /* Reset alias info if we didn't apply measures to
1845 keep it valid over inlining by setting DECL_PT_UID. */
1846 if (!id
->src_cfun
->gimple_df
1847 || !id
->src_cfun
->gimple_df
->ipa_pta
)
1848 gimple_call_reset_alias_info (as_a
<gcall
*> (copy
));
1854 gresx
*resx_stmt
= as_a
<gresx
*> (copy
);
1855 int r
= gimple_resx_region (resx_stmt
);
1856 r
= remap_eh_region_nr (r
, id
);
1857 gimple_resx_set_region (resx_stmt
, r
);
1861 case GIMPLE_EH_DISPATCH
:
1863 geh_dispatch
*eh_dispatch
= as_a
<geh_dispatch
*> (copy
);
1864 int r
= gimple_eh_dispatch_region (eh_dispatch
);
1865 r
= remap_eh_region_nr (r
, id
);
1866 gimple_eh_dispatch_set_region (eh_dispatch
, r
);
1875 /* If STMT has a block defined, map it to the newly constructed block. */
1876 if (tree block
= gimple_block (copy
))
1879 n
= id
->decl_map
->get (block
);
1881 gimple_set_block (copy
, *n
);
1884 if (id
->reset_location
)
1885 gimple_set_location (copy
, input_location
);
1887 /* Debug statements ought to be rebuilt and not copied. */
1888 gcc_checking_assert (!is_gimple_debug (copy
));
1890 /* Remap all the operands in COPY. */
1891 memset (&wi
, 0, sizeof (wi
));
1894 walk_tree (gimple_op_ptr (copy
, 1), remap_gimple_op_r
, &wi
, NULL
);
1896 walk_gimple_op (copy
, remap_gimple_op_r
, &wi
);
1898 /* Clear the copied virtual operands. We are not remapping them here
1899 but are going to recreate them from scratch. */
1900 if (gimple_has_mem_ops (copy
))
1902 gimple_set_vdef (copy
, NULL_TREE
);
1903 gimple_set_vuse (copy
, NULL_TREE
);
1906 gimple_seq_add_stmt (&stmts
, copy
);
1911 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1915 copy_bb (copy_body_data
*id
, basic_block bb
,
1916 profile_count num
, profile_count den
)
1918 gimple_stmt_iterator gsi
, copy_gsi
, seq_gsi
;
1919 basic_block copy_basic_block
;
1923 profile_count::adjust_for_ipa_scaling (&num
, &den
);
1925 /* Search for previous copied basic block. */
1928 prev
= prev
->prev_bb
;
1930 /* create_basic_block() will append every new block to
1931 basic_block_info automatically. */
1932 copy_basic_block
= create_basic_block (NULL
, (basic_block
) prev
->aux
);
1933 copy_basic_block
->count
= bb
->count
.apply_scale (num
, den
);
1935 copy_gsi
= gsi_start_bb (copy_basic_block
);
1937 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1940 gimple
*stmt
= gsi_stmt (gsi
);
1941 gimple
*orig_stmt
= stmt
;
1942 gimple_stmt_iterator stmts_gsi
;
1943 bool stmt_added
= false;
1945 id
->regimplify
= false;
1946 stmts
= remap_gimple_stmt (stmt
, id
);
1948 if (gimple_seq_empty_p (stmts
))
1953 for (stmts_gsi
= gsi_start (stmts
);
1954 !gsi_end_p (stmts_gsi
); )
1956 stmt
= gsi_stmt (stmts_gsi
);
1958 /* Advance iterator now before stmt is moved to seq_gsi. */
1959 gsi_next (&stmts_gsi
);
1961 if (gimple_nop_p (stmt
))
1964 gimple_duplicate_stmt_histograms (cfun
, stmt
, id
->src_cfun
,
1967 /* With return slot optimization we can end up with
1968 non-gimple (foo *)&this->m, fix that here. */
1969 if (is_gimple_assign (stmt
)
1970 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
1971 && !is_gimple_val (gimple_assign_rhs1 (stmt
)))
1974 new_rhs
= force_gimple_operand_gsi (&seq_gsi
,
1975 gimple_assign_rhs1 (stmt
),
1977 GSI_CONTINUE_LINKING
);
1978 gimple_assign_set_rhs1 (stmt
, new_rhs
);
1979 id
->regimplify
= false;
1982 gsi_insert_after (&seq_gsi
, stmt
, GSI_NEW_STMT
);
1985 gimple_regimplify_operands (stmt
, &seq_gsi
);
1993 /* If copy_basic_block has been empty at the start of this iteration,
1994 call gsi_start_bb again to get at the newly added statements. */
1995 if (gsi_end_p (copy_gsi
))
1996 copy_gsi
= gsi_start_bb (copy_basic_block
);
1998 gsi_next (©_gsi
);
2000 /* Process the new statement. The call to gimple_regimplify_operands
2001 possibly turned the statement into multiple statements, we
2002 need to process all of them. */
2008 stmt
= gsi_stmt (copy_gsi
);
2009 call_stmt
= dyn_cast
<gcall
*> (stmt
);
2011 && gimple_call_va_arg_pack_p (call_stmt
)
2013 && ! gimple_call_va_arg_pack_p (id
->call_stmt
))
2015 /* __builtin_va_arg_pack () should be replaced by
2016 all arguments corresponding to ... in the caller. */
2020 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
2023 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
2026 /* Create the new array of arguments. */
2027 n
= nargs
+ gimple_call_num_args (call_stmt
);
2028 argarray
.create (n
);
2029 argarray
.safe_grow_cleared (n
);
2031 /* Copy all the arguments before '...' */
2032 memcpy (argarray
.address (),
2033 gimple_call_arg_ptr (call_stmt
, 0),
2034 gimple_call_num_args (call_stmt
) * sizeof (tree
));
2036 /* Append the arguments passed in '...' */
2037 memcpy (argarray
.address () + gimple_call_num_args (call_stmt
),
2038 gimple_call_arg_ptr (id
->call_stmt
, 0)
2039 + (gimple_call_num_args (id
->call_stmt
) - nargs
),
2040 nargs
* sizeof (tree
));
2042 new_call
= gimple_build_call_vec (gimple_call_fn (call_stmt
),
2045 argarray
.release ();
2047 /* Copy all GIMPLE_CALL flags, location and block, except
2048 GF_CALL_VA_ARG_PACK. */
2049 gimple_call_copy_flags (new_call
, call_stmt
);
2050 gimple_call_set_va_arg_pack (new_call
, false);
2051 /* location includes block. */
2052 gimple_set_location (new_call
, gimple_location (stmt
));
2053 gimple_call_set_lhs (new_call
, gimple_call_lhs (call_stmt
));
2055 gsi_replace (©_gsi
, new_call
, false);
2060 && (decl
= gimple_call_fndecl (stmt
))
2061 && fndecl_built_in_p (decl
, BUILT_IN_VA_ARG_PACK_LEN
))
2063 /* __builtin_va_arg_pack_len () should be replaced by
2064 the number of anonymous arguments. */
2065 size_t nargs
= gimple_call_num_args (id
->call_stmt
);
2069 for (p
= DECL_ARGUMENTS (id
->src_fn
); p
; p
= DECL_CHAIN (p
))
2072 if (!gimple_call_lhs (stmt
))
2074 /* Drop unused calls. */
2075 gsi_remove (©_gsi
, false);
2078 else if (!gimple_call_va_arg_pack_p (id
->call_stmt
))
2080 count
= build_int_cst (integer_type_node
, nargs
);
2081 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
), count
);
2082 gsi_replace (©_gsi
, new_stmt
, false);
2085 else if (nargs
!= 0)
2087 tree newlhs
= create_tmp_reg_or_ssa_name (integer_type_node
);
2088 count
= build_int_cst (integer_type_node
, nargs
);
2089 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
2090 PLUS_EXPR
, newlhs
, count
);
2091 gimple_call_set_lhs (stmt
, newlhs
);
2092 gsi_insert_after (©_gsi
, new_stmt
, GSI_NEW_STMT
);
2097 && gimple_call_internal_p (stmt
)
2098 && gimple_call_internal_fn (stmt
) == IFN_TSAN_FUNC_EXIT
)
2100 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2101 gsi_remove (©_gsi
, false);
2105 /* Statements produced by inlining can be unfolded, especially
2106 when we constant propagated some operands. We can't fold
2107 them right now for two reasons:
2108 1) folding require SSA_NAME_DEF_STMTs to be correct
2109 2) we can't change function calls to builtins.
2110 So we just mark statement for later folding. We mark
2111 all new statements, instead just statements that has changed
2112 by some nontrivial substitution so even statements made
2113 foldable indirectly are updated. If this turns out to be
2114 expensive, copy_body can be told to watch for nontrivial
2116 if (id
->statements_to_fold
)
2117 id
->statements_to_fold
->add (stmt
);
2119 /* We're duplicating a CALL_EXPR. Find any corresponding
2120 callgraph edges and update or duplicate them. */
2121 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
2123 struct cgraph_edge
*edge
;
2125 switch (id
->transform_call_graph_edges
)
2127 case CB_CGE_DUPLICATE
:
2128 edge
= id
->src_node
->get_edge (orig_stmt
);
2131 struct cgraph_edge
*old_edge
= edge
;
2132 profile_count old_cnt
= edge
->count
;
2133 edge
= edge
->clone (id
->dst_node
, call_stmt
,
2138 /* Speculative calls consist of two edges - direct and
2139 indirect. Duplicate the whole thing and distribute
2140 frequencies accordingly. */
2141 if (edge
->speculative
)
2143 struct cgraph_edge
*direct
, *indirect
;
2144 struct ipa_ref
*ref
;
2146 gcc_assert (!edge
->indirect_unknown_callee
);
2147 old_edge
->speculative_call_info (direct
, indirect
, ref
);
2149 profile_count indir_cnt
= indirect
->count
;
2150 indirect
= indirect
->clone (id
->dst_node
, call_stmt
,
2155 profile_probability prob
2156 = indir_cnt
.probability_in (old_cnt
+ indir_cnt
);
2158 = copy_basic_block
->count
.apply_probability (prob
);
2159 edge
->count
= copy_basic_block
->count
- indirect
->count
;
2160 id
->dst_node
->clone_reference (ref
, stmt
);
2163 edge
->count
= copy_basic_block
->count
;
2167 case CB_CGE_MOVE_CLONES
:
2168 id
->dst_node
->set_call_stmt_including_clones (orig_stmt
,
2170 edge
= id
->dst_node
->get_edge (stmt
);
2174 edge
= id
->dst_node
->get_edge (orig_stmt
);
2176 edge
->set_call_stmt (call_stmt
);
2183 /* Constant propagation on argument done during inlining
2184 may create new direct call. Produce an edge for it. */
2186 || (edge
->indirect_inlining_edge
2187 && id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
))
2188 && id
->dst_node
->definition
2189 && (fn
= gimple_call_fndecl (stmt
)) != NULL
)
2191 struct cgraph_node
*dest
= cgraph_node::get_create (fn
);
2193 /* We have missing edge in the callgraph. This can happen
2194 when previous inlining turned an indirect call into a
2195 direct call by constant propagating arguments or we are
2196 producing dead clone (for further cloning). In all
2197 other cases we hit a bug (incorrect node sharing is the
2198 most common reason for missing edges). */
2199 gcc_assert (!dest
->definition
2200 || dest
->address_taken
2201 || !id
->src_node
->definition
2202 || !id
->dst_node
->definition
);
2203 if (id
->transform_call_graph_edges
== CB_CGE_MOVE_CLONES
)
2204 id
->dst_node
->create_edge_including_clones
2205 (dest
, orig_stmt
, call_stmt
, bb
->count
,
2206 CIF_ORIGINALLY_INDIRECT_CALL
);
2208 id
->dst_node
->create_edge (dest
, call_stmt
,
2209 bb
->count
)->inline_failed
2210 = CIF_ORIGINALLY_INDIRECT_CALL
;
2213 fprintf (dump_file
, "Created new direct edge to %s\n",
2218 notice_special_calls (as_a
<gcall
*> (stmt
));
2221 maybe_duplicate_eh_stmt_fn (cfun
, stmt
, id
->src_cfun
, orig_stmt
,
2222 id
->eh_map
, id
->eh_lp_nr
);
2224 gsi_next (©_gsi
);
2226 while (!gsi_end_p (copy_gsi
));
2228 copy_gsi
= gsi_last_bb (copy_basic_block
);
2231 return copy_basic_block
;
2234 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2235 form is quite easy, since dominator relationship for old basic blocks does
2238 There is however exception where inlining might change dominator relation
2239 across EH edges from basic block within inlined functions destinating
2240 to landing pads in function we inline into.
2242 The function fills in PHI_RESULTs of such PHI nodes if they refer
2243 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2244 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2245 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2246 set, and this means that there will be no overlapping live ranges
2247 for the underlying symbol.
2249 This might change in future if we allow redirecting of EH edges and
2250 we might want to change way build CFG pre-inlining to include
2251 all the possible edges then. */
2253 update_ssa_across_abnormal_edges (basic_block bb
, basic_block ret_bb
,
2254 bool can_throw
, bool nonlocal_goto
)
2259 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2261 || ((basic_block
)e
->dest
->aux
)->index
== ENTRY_BLOCK
)
2267 gcc_assert (e
->flags
& EDGE_EH
);
2270 gcc_assert (!(e
->flags
& EDGE_EH
));
2272 for (si
= gsi_start_phis (e
->dest
); !gsi_end_p (si
); gsi_next (&si
))
2278 /* For abnormal goto/call edges the receiver can be the
2279 ENTRY_BLOCK. Do not assert this cannot happen. */
2281 gcc_assert ((e
->flags
& EDGE_EH
)
2282 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)));
2284 re
= find_edge (ret_bb
, e
->dest
);
2285 gcc_checking_assert (re
);
2286 gcc_assert ((re
->flags
& (EDGE_EH
| EDGE_ABNORMAL
))
2287 == (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
)));
2289 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
),
2290 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi
, re
)));
2295 /* Insert clobbers for automatic variables of inlined ID->src_fn
2296 function at the start of basic block ID->eh_landing_pad_dest. */
2299 add_clobbers_to_eh_landing_pad (copy_body_data
*id
)
2302 basic_block bb
= id
->eh_landing_pad_dest
;
2303 live_vars_map
*vars
= NULL
;
2304 unsigned int cnt
= 0;
2306 FOR_EACH_VEC_SAFE_ELT (id
->src_cfun
->local_decls
, i
, var
)
2308 && !DECL_HARD_REGISTER (var
)
2309 && !TREE_THIS_VOLATILE (var
)
2310 && !DECL_HAS_VALUE_EXPR_P (var
)
2311 && !is_gimple_reg (var
)
2312 && auto_var_in_fn_p (var
, id
->src_fn
)
2313 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var
)))
2315 tree
*t
= id
->decl_map
->get (var
);
2320 && !DECL_HARD_REGISTER (new_var
)
2321 && !TREE_THIS_VOLATILE (new_var
)
2322 && !DECL_HAS_VALUE_EXPR_P (new_var
)
2323 && !is_gimple_reg (new_var
)
2324 && auto_var_in_fn_p (new_var
, id
->dst_fn
))
2327 vars
= new live_vars_map
;
2328 vars
->put (DECL_UID (var
), cnt
++);
2334 vec
<bitmap_head
> live
= compute_live_vars (id
->src_cfun
, vars
);
2335 FOR_EACH_VEC_SAFE_ELT (id
->src_cfun
->local_decls
, i
, var
)
2340 bool needed
= false;
2341 unsigned int *v
= vars
->get (DECL_UID (var
));
2344 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
2345 if ((e
->flags
& EDGE_EH
) != 0
2346 && e
->src
->index
>= id
->add_clobbers_to_eh_landing_pads
)
2348 basic_block src_bb
= (basic_block
) e
->src
->aux
;
2350 if (bitmap_bit_p (&live
[src_bb
->index
], *v
))
2358 tree new_var
= *id
->decl_map
->get (var
);
2359 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
2360 tree clobber
= build_clobber (TREE_TYPE (new_var
));
2361 gimple
*clobber_stmt
= gimple_build_assign (new_var
, clobber
);
2362 gsi_insert_before (&gsi
, clobber_stmt
, GSI_NEW_STMT
);
2365 destroy_live_vars (live
);
2369 /* Copy edges from BB into its copy constructed earlier, scale profile
2370 accordingly. Edges will be taken care of later. Assume aux
2371 pointers to point to the copies of each BB. Return true if any
2372 debug stmts are left after a statement that must end the basic block. */
2375 copy_edges_for_bb (basic_block bb
, profile_count num
, profile_count den
,
2376 basic_block ret_bb
, basic_block abnormal_goto_dest
,
2379 basic_block new_bb
= (basic_block
) bb
->aux
;
2382 gimple_stmt_iterator si
;
2383 bool need_debug_cleanup
= false;
2385 /* Use the indices from the original blocks to create edges for the
2387 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2388 if (!(old_edge
->flags
& EDGE_EH
))
2391 int flags
= old_edge
->flags
;
2392 location_t locus
= old_edge
->goto_locus
;
2394 /* Return edges do get a FALLTHRU flag when they get inlined. */
2395 if (old_edge
->dest
->index
== EXIT_BLOCK
2396 && !(flags
& (EDGE_TRUE_VALUE
|EDGE_FALSE_VALUE
|EDGE_FAKE
))
2397 && old_edge
->dest
->aux
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
2398 flags
|= EDGE_FALLTHRU
;
2401 = make_edge (new_bb
, (basic_block
) old_edge
->dest
->aux
, flags
);
2402 new_edge
->probability
= old_edge
->probability
;
2403 if (!id
->reset_location
)
2404 new_edge
->goto_locus
= remap_location (locus
, id
);
2407 if (bb
->index
== ENTRY_BLOCK
|| bb
->index
== EXIT_BLOCK
)
2410 /* When doing function splitting, we must decrease count of the return block
2411 which was previously reachable by block we did not copy. */
2412 if (single_succ_p (bb
) && single_succ_edge (bb
)->dest
->index
== EXIT_BLOCK
)
2413 FOR_EACH_EDGE (old_edge
, ei
, bb
->preds
)
2414 if (old_edge
->src
->index
!= ENTRY_BLOCK
2415 && !old_edge
->src
->aux
)
2416 new_bb
->count
-= old_edge
->count ().apply_scale (num
, den
);
2418 for (si
= gsi_start_bb (new_bb
); !gsi_end_p (si
);)
2421 bool can_throw
, nonlocal_goto
;
2423 copy_stmt
= gsi_stmt (si
);
2424 if (!is_gimple_debug (copy_stmt
))
2425 update_stmt (copy_stmt
);
2427 /* Do this before the possible split_block. */
2430 /* If this tree could throw an exception, there are two
2431 cases where we need to add abnormal edge(s): the
2432 tree wasn't in a region and there is a "current
2433 region" in the caller; or the original tree had
2434 EH edges. In both cases split the block after the tree,
2435 and add abnormal edge(s) as needed; we need both
2436 those from the callee and the caller.
2437 We check whether the copy can throw, because the const
2438 propagation can change an INDIRECT_REF which throws
2439 into a COMPONENT_REF which doesn't. If the copy
2440 can throw, the original could also throw. */
2441 can_throw
= stmt_can_throw_internal (cfun
, copy_stmt
);
2443 = (stmt_can_make_abnormal_goto (copy_stmt
)
2444 && !computed_goto_p (copy_stmt
));
2446 if (can_throw
|| nonlocal_goto
)
2448 if (!gsi_end_p (si
))
2450 while (!gsi_end_p (si
) && is_gimple_debug (gsi_stmt (si
)))
2453 need_debug_cleanup
= true;
2455 if (!gsi_end_p (si
))
2456 /* Note that bb's predecessor edges aren't necessarily
2457 right at this point; split_block doesn't care. */
2459 edge e
= split_block (new_bb
, copy_stmt
);
2462 new_bb
->aux
= e
->src
->aux
;
2463 si
= gsi_start_bb (new_bb
);
2467 bool update_probs
= false;
2469 if (gimple_code (copy_stmt
) == GIMPLE_EH_DISPATCH
)
2471 make_eh_dispatch_edges (as_a
<geh_dispatch
*> (copy_stmt
));
2472 update_probs
= true;
2476 make_eh_edges (copy_stmt
);
2477 update_probs
= true;
2480 /* EH edges may not match old edges. Copy as much as possible. */
2485 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2487 FOR_EACH_EDGE (old_edge
, ei
, bb
->succs
)
2488 if ((old_edge
->flags
& EDGE_EH
)
2489 && (e
= find_edge (copy_stmt_bb
,
2490 (basic_block
) old_edge
->dest
->aux
))
2491 && (e
->flags
& EDGE_EH
))
2492 e
->probability
= old_edge
->probability
;
2494 FOR_EACH_EDGE (e
, ei
, copy_stmt_bb
->succs
)
2495 if (e
->flags
& EDGE_EH
)
2497 if (!e
->probability
.initialized_p ())
2498 e
->probability
= profile_probability::never ();
2499 if (e
->dest
->index
< id
->add_clobbers_to_eh_landing_pads
)
2501 if (id
->eh_landing_pad_dest
== NULL
)
2502 id
->eh_landing_pad_dest
= e
->dest
;
2504 gcc_assert (id
->eh_landing_pad_dest
== e
->dest
);
2510 /* If the call we inline cannot make abnormal goto do not add
2511 additional abnormal edges but only retain those already present
2512 in the original function body. */
2513 if (abnormal_goto_dest
== NULL
)
2514 nonlocal_goto
= false;
2517 basic_block copy_stmt_bb
= gimple_bb (copy_stmt
);
2519 if (get_abnormal_succ_dispatcher (copy_stmt_bb
))
2520 nonlocal_goto
= false;
2521 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2522 in OpenMP regions which aren't allowed to be left abnormally.
2523 So, no need to add abnormal edge in that case. */
2524 else if (is_gimple_call (copy_stmt
)
2525 && gimple_call_internal_p (copy_stmt
)
2526 && (gimple_call_internal_fn (copy_stmt
)
2527 == IFN_ABNORMAL_DISPATCHER
)
2528 && gimple_call_arg (copy_stmt
, 0) == boolean_true_node
)
2529 nonlocal_goto
= false;
2531 make_single_succ_edge (copy_stmt_bb
, abnormal_goto_dest
,
2535 if ((can_throw
|| nonlocal_goto
)
2536 && gimple_in_ssa_p (cfun
))
2537 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt
), ret_bb
,
2538 can_throw
, nonlocal_goto
);
2540 return need_debug_cleanup
;
2543 /* Copy the PHIs. All blocks and edges are copied, some blocks
2544 was possibly split and new outgoing EH edges inserted.
2545 BB points to the block of original function and AUX pointers links
2546 the original and newly copied blocks. */
2549 copy_phis_for_bb (basic_block bb
, copy_body_data
*id
)
2551 basic_block
const new_bb
= (basic_block
) bb
->aux
;
2556 bool inserted
= false;
2558 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
2564 res
= PHI_RESULT (phi
);
2566 if (!virtual_operand_p (res
))
2568 walk_tree (&new_res
, copy_tree_body_r
, id
, NULL
);
2569 if (EDGE_COUNT (new_bb
->preds
) == 0)
2571 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2572 SSA_NAME_DEF_STMT (new_res
) = gimple_build_nop ();
2576 new_phi
= create_phi_node (new_res
, new_bb
);
2577 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2579 edge old_edge
= find_edge ((basic_block
) new_edge
->src
->aux
,
2586 /* When doing partial cloning, we allow PHIs on the entry
2587 block as long as all the arguments are the same.
2588 Find any input edge to see argument to copy. */
2590 FOR_EACH_EDGE (old_edge
, ei2
, bb
->preds
)
2591 if (!old_edge
->src
->aux
)
2594 arg
= PHI_ARG_DEF_FROM_EDGE (phi
, old_edge
);
2596 walk_tree (&new_arg
, copy_tree_body_r
, id
, NULL
);
2597 gcc_assert (new_arg
);
2598 /* With return slot optimization we can end up with
2599 non-gimple (foo *)&this->m, fix that here. */
2600 if (TREE_CODE (new_arg
) != SSA_NAME
2601 && TREE_CODE (new_arg
) != FUNCTION_DECL
2602 && !is_gimple_val (new_arg
))
2604 gimple_seq stmts
= NULL
;
2605 new_arg
= force_gimple_operand (new_arg
, &stmts
, true,
2607 gsi_insert_seq_on_edge (new_edge
, stmts
);
2610 locus
= gimple_phi_arg_location_from_edge (phi
, old_edge
);
2611 if (id
->reset_location
)
2612 locus
= input_location
;
2614 locus
= remap_location (locus
, id
);
2615 add_phi_arg (new_phi
, new_arg
, new_edge
, locus
);
2621 /* Commit the delayed edge insertions. */
2623 FOR_EACH_EDGE (new_edge
, ei
, new_bb
->preds
)
2624 gsi_commit_one_edge_insert (new_edge
, NULL
);
2628 /* Wrapper for remap_decl so it can be used as a callback. */
2631 remap_decl_1 (tree decl
, void *data
)
2633 return remap_decl (decl
, (copy_body_data
*) data
);
2636 /* Build struct function and associated datastructures for the new clone
2637 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2638 the cfun to the function of new_fndecl (and current_function_decl too). */
2641 initialize_cfun (tree new_fndecl
, tree callee_fndecl
, profile_count count
)
2643 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2645 if (!DECL_ARGUMENTS (new_fndecl
))
2646 DECL_ARGUMENTS (new_fndecl
) = DECL_ARGUMENTS (callee_fndecl
);
2647 if (!DECL_RESULT (new_fndecl
))
2648 DECL_RESULT (new_fndecl
) = DECL_RESULT (callee_fndecl
);
2650 /* Register specific tree functions. */
2651 gimple_register_cfg_hooks ();
2653 /* Get clean struct function. */
2654 push_struct_function (new_fndecl
);
2656 /* We will rebuild these, so just sanity check that they are empty. */
2657 gcc_assert (VALUE_HISTOGRAMS (cfun
) == NULL
);
2658 gcc_assert (cfun
->local_decls
== NULL
);
2659 gcc_assert (cfun
->cfg
== NULL
);
2660 gcc_assert (cfun
->decl
== new_fndecl
);
2662 /* Copy items we preserve during cloning. */
2663 cfun
->static_chain_decl
= src_cfun
->static_chain_decl
;
2664 cfun
->nonlocal_goto_save_area
= src_cfun
->nonlocal_goto_save_area
;
2665 cfun
->function_end_locus
= src_cfun
->function_end_locus
;
2666 cfun
->curr_properties
= src_cfun
->curr_properties
;
2667 cfun
->last_verified
= src_cfun
->last_verified
;
2668 cfun
->va_list_gpr_size
= src_cfun
->va_list_gpr_size
;
2669 cfun
->va_list_fpr_size
= src_cfun
->va_list_fpr_size
;
2670 cfun
->has_nonlocal_label
= src_cfun
->has_nonlocal_label
;
2671 cfun
->calls_eh_return
= src_cfun
->calls_eh_return
;
2672 cfun
->stdarg
= src_cfun
->stdarg
;
2673 cfun
->after_inlining
= src_cfun
->after_inlining
;
2674 cfun
->can_throw_non_call_exceptions
2675 = src_cfun
->can_throw_non_call_exceptions
;
2676 cfun
->can_delete_dead_exceptions
= src_cfun
->can_delete_dead_exceptions
;
2677 cfun
->returns_struct
= src_cfun
->returns_struct
;
2678 cfun
->returns_pcc_struct
= src_cfun
->returns_pcc_struct
;
2680 init_empty_tree_cfg ();
2682 profile_status_for_fn (cfun
) = profile_status_for_fn (src_cfun
);
2684 profile_count num
= count
;
2685 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2686 profile_count::adjust_for_ipa_scaling (&num
, &den
);
2688 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
=
2689 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2690 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2691 EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
=
2692 EXIT_BLOCK_PTR_FOR_FN (src_cfun
)->count
.apply_scale (count
,
2693 ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
);
2695 init_eh_for_function ();
2697 if (src_cfun
->gimple_df
)
2699 init_tree_ssa (cfun
);
2700 cfun
->gimple_df
->in_ssa_p
= src_cfun
->gimple_df
->in_ssa_p
;
2701 if (cfun
->gimple_df
->in_ssa_p
)
2702 init_ssa_operands (cfun
);
2706 /* Helper function for copy_cfg_body. Move debug stmts from the end
2707 of NEW_BB to the beginning of successor basic blocks when needed. If the
2708 successor has multiple predecessors, reset them, otherwise keep
2712 maybe_move_debug_stmts_to_successors (copy_body_data
*id
, basic_block new_bb
)
2716 gimple_stmt_iterator si
= gsi_last_nondebug_bb (new_bb
);
2719 || gsi_one_before_end_p (si
)
2720 || !(stmt_can_throw_internal (cfun
, gsi_stmt (si
))
2721 || stmt_can_make_abnormal_goto (gsi_stmt (si
))))
2724 FOR_EACH_EDGE (e
, ei
, new_bb
->succs
)
2726 gimple_stmt_iterator ssi
= gsi_last_bb (new_bb
);
2727 gimple_stmt_iterator dsi
= gsi_after_labels (e
->dest
);
2728 while (is_gimple_debug (gsi_stmt (ssi
)))
2730 gimple
*stmt
= gsi_stmt (ssi
);
2735 /* For the last edge move the debug stmts instead of copying
2737 if (ei_one_before_end_p (ei
))
2741 if (!single_pred_p (e
->dest
) && gimple_debug_bind_p (stmt
))
2743 gimple_debug_bind_reset_value (stmt
);
2744 gimple_set_location (stmt
, UNKNOWN_LOCATION
);
2746 gsi_remove (&si
, false);
2747 gsi_insert_before (&dsi
, stmt
, GSI_SAME_STMT
);
2751 if (gimple_debug_bind_p (stmt
))
2753 var
= gimple_debug_bind_get_var (stmt
);
2754 if (single_pred_p (e
->dest
))
2756 value
= gimple_debug_bind_get_value (stmt
);
2757 value
= unshare_expr (value
);
2758 new_stmt
= gimple_build_debug_bind (var
, value
, stmt
);
2761 new_stmt
= gimple_build_debug_bind (var
, NULL_TREE
, NULL
);
2763 else if (gimple_debug_source_bind_p (stmt
))
2765 var
= gimple_debug_source_bind_get_var (stmt
);
2766 value
= gimple_debug_source_bind_get_value (stmt
);
2767 new_stmt
= gimple_build_debug_source_bind (var
, value
, stmt
);
2769 else if (gimple_debug_nonbind_marker_p (stmt
))
2770 new_stmt
= as_a
<gdebug
*> (gimple_copy (stmt
));
2773 gsi_insert_before (&dsi
, new_stmt
, GSI_SAME_STMT
);
2774 id
->debug_stmts
.safe_push (new_stmt
);
2780 /* Make a copy of the sub-loops of SRC_PARENT and place them
2781 as siblings of DEST_PARENT. */
2784 copy_loops (copy_body_data
*id
,
2785 struct loop
*dest_parent
, struct loop
*src_parent
)
2787 struct loop
*src_loop
= src_parent
->inner
;
2790 if (!id
->blocks_to_copy
2791 || bitmap_bit_p (id
->blocks_to_copy
, src_loop
->header
->index
))
2793 struct loop
*dest_loop
= alloc_loop ();
2795 /* Assign the new loop its header and latch and associate
2796 those with the new loop. */
2797 dest_loop
->header
= (basic_block
)src_loop
->header
->aux
;
2798 dest_loop
->header
->loop_father
= dest_loop
;
2799 if (src_loop
->latch
!= NULL
)
2801 dest_loop
->latch
= (basic_block
)src_loop
->latch
->aux
;
2802 dest_loop
->latch
->loop_father
= dest_loop
;
2805 /* Copy loop meta-data. */
2806 copy_loop_info (src_loop
, dest_loop
);
2807 if (dest_loop
->unroll
)
2808 cfun
->has_unroll
= true;
2809 if (dest_loop
->force_vectorize
)
2810 cfun
->has_force_vectorize_loops
= true;
2811 if (id
->src_cfun
->last_clique
!= 0)
2812 dest_loop
->owned_clique
2813 = remap_dependence_clique (id
,
2814 src_loop
->owned_clique
2815 ? src_loop
->owned_clique
: 1);
2817 /* Finally place it into the loop array and the loop tree. */
2818 place_new_loop (cfun
, dest_loop
);
2819 flow_loop_tree_node_add (dest_parent
, dest_loop
);
2821 if (src_loop
->simduid
)
2823 dest_loop
->simduid
= remap_decl (src_loop
->simduid
, id
);
2824 cfun
->has_simduid_loops
= true;
2828 copy_loops (id
, dest_loop
, src_loop
);
2830 src_loop
= src_loop
->next
;
2834 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2837 redirect_all_calls (copy_body_data
* id
, basic_block bb
)
2839 gimple_stmt_iterator si
;
2840 gimple
*last
= last_stmt (bb
);
2841 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
2843 gimple
*stmt
= gsi_stmt (si
);
2844 if (is_gimple_call (stmt
))
2846 struct cgraph_edge
*edge
= id
->dst_node
->get_edge (stmt
);
2849 edge
->redirect_call_stmt_to_callee ();
2850 if (stmt
== last
&& id
->call_stmt
&& maybe_clean_eh_stmt (stmt
))
2851 gimple_purge_dead_eh_edges (bb
);
2857 /* Make a copy of the body of FN so that it can be inserted inline in
2858 another function. Walks FN via CFG, returns new fndecl. */
2861 copy_cfg_body (copy_body_data
* id
,
2862 basic_block entry_block_map
, basic_block exit_block_map
,
2863 basic_block new_entry
)
2865 tree callee_fndecl
= id
->src_fn
;
2866 /* Original cfun for the callee, doesn't change. */
2867 struct function
*src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2868 struct function
*cfun_to_copy
;
2870 tree new_fndecl
= NULL
;
2871 bool need_debug_cleanup
= false;
2873 profile_count den
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun
)->count
;
2874 profile_count num
= entry_block_map
->count
;
2876 cfun_to_copy
= id
->src_cfun
= DECL_STRUCT_FUNCTION (callee_fndecl
);
2878 /* Register specific tree functions. */
2879 gimple_register_cfg_hooks ();
2881 /* If we are inlining just region of the function, make sure to connect
2882 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2883 part of loop, we must compute frequency and probability of
2884 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2885 probabilities of edges incoming from nonduplicated region. */
2890 den
= profile_count::zero ();
2892 FOR_EACH_EDGE (e
, ei
, new_entry
->preds
)
2895 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
= den
;
2898 profile_count::adjust_for_ipa_scaling (&num
, &den
);
2900 /* Must have a CFG here at this point. */
2901 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2902 (DECL_STRUCT_FUNCTION (callee_fndecl
)));
2905 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= entry_block_map
;
2906 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
)->aux
= exit_block_map
;
2907 entry_block_map
->aux
= ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2908 exit_block_map
->aux
= EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy
);
2910 /* Duplicate any exception-handling regions. */
2912 id
->eh_map
= duplicate_eh_regions (cfun_to_copy
, NULL
, id
->eh_lp_nr
,
2915 /* Use aux pointers to map the original blocks to copy. */
2916 FOR_EACH_BB_FN (bb
, cfun_to_copy
)
2917 if (!id
->blocks_to_copy
|| bitmap_bit_p (id
->blocks_to_copy
, bb
->index
))
2919 basic_block new_bb
= copy_bb (id
, bb
, num
, den
);
2922 new_bb
->loop_father
= entry_block_map
->loop_father
;
2925 last
= last_basic_block_for_fn (cfun
);
2927 /* Now that we've duplicated the blocks, duplicate their edges. */
2928 basic_block abnormal_goto_dest
= NULL
;
2930 && stmt_can_make_abnormal_goto (id
->call_stmt
))
2932 gimple_stmt_iterator gsi
= gsi_for_stmt (id
->call_stmt
);
2934 bb
= gimple_bb (id
->call_stmt
);
2936 if (gsi_end_p (gsi
))
2937 abnormal_goto_dest
= get_abnormal_succ_dispatcher (bb
);
2939 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2940 if (!id
->blocks_to_copy
2941 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2942 need_debug_cleanup
|= copy_edges_for_bb (bb
, num
, den
, exit_block_map
,
2943 abnormal_goto_dest
, id
);
2945 if (id
->eh_landing_pad_dest
)
2947 add_clobbers_to_eh_landing_pad (id
);
2948 id
->eh_landing_pad_dest
= NULL
;
2953 edge e
= make_edge (entry_block_map
, (basic_block
)new_entry
->aux
,
2955 e
->probability
= profile_probability::always ();
2958 /* Duplicate the loop tree, if available and wanted. */
2959 if (loops_for_fn (src_cfun
) != NULL
2960 && current_loops
!= NULL
)
2962 copy_loops (id
, entry_block_map
->loop_father
,
2963 get_loop (src_cfun
, 0));
2964 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2965 loops_state_set (LOOPS_NEED_FIXUP
);
2968 /* If the loop tree in the source function needed fixup, mark the
2969 destination loop tree for fixup, too. */
2970 if (loops_for_fn (src_cfun
)->state
& LOOPS_NEED_FIXUP
)
2971 loops_state_set (LOOPS_NEED_FIXUP
);
2973 if (gimple_in_ssa_p (cfun
))
2974 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2975 if (!id
->blocks_to_copy
2976 || (bb
->index
> 0 && bitmap_bit_p (id
->blocks_to_copy
, bb
->index
)))
2977 copy_phis_for_bb (bb
, id
);
2979 FOR_ALL_BB_FN (bb
, cfun_to_copy
)
2982 if (need_debug_cleanup
2983 && bb
->index
!= ENTRY_BLOCK
2984 && bb
->index
!= EXIT_BLOCK
)
2985 maybe_move_debug_stmts_to_successors (id
, (basic_block
) bb
->aux
);
2986 /* Update call edge destinations. This cannot be done before loop
2987 info is updated, because we may split basic blocks. */
2988 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
2989 && bb
->index
!= ENTRY_BLOCK
2990 && bb
->index
!= EXIT_BLOCK
)
2991 redirect_all_calls (id
, (basic_block
)bb
->aux
);
2992 ((basic_block
)bb
->aux
)->aux
= NULL
;
2996 /* Zero out AUX fields of newly created block during EH edge
2998 for (; last
< last_basic_block_for_fn (cfun
); last
++)
3000 if (need_debug_cleanup
)
3001 maybe_move_debug_stmts_to_successors (id
,
3002 BASIC_BLOCK_FOR_FN (cfun
, last
));
3003 BASIC_BLOCK_FOR_FN (cfun
, last
)->aux
= NULL
;
3004 /* Update call edge destinations. This cannot be done before loop
3005 info is updated, because we may split basic blocks. */
3006 if (id
->transform_call_graph_edges
== CB_CGE_DUPLICATE
)
3007 redirect_all_calls (id
, BASIC_BLOCK_FOR_FN (cfun
, last
));
3009 entry_block_map
->aux
= NULL
;
3010 exit_block_map
->aux
= NULL
;
3017 if (id
->dependence_map
)
3019 delete id
->dependence_map
;
3020 id
->dependence_map
= NULL
;
3026 /* Copy the debug STMT using ID. We deal with these statements in a
3027 special way: if any variable in their VALUE expression wasn't
3028 remapped yet, we won't remap it, because that would get decl uids
3029 out of sync, causing codegen differences between -g and -g0. If
3030 this arises, we drop the VALUE expression altogether. */
3033 copy_debug_stmt (gdebug
*stmt
, copy_body_data
*id
)
3036 struct walk_stmt_info wi
;
3038 if (tree block
= gimple_block (stmt
))
3040 n
= id
->decl_map
->get (block
);
3041 gimple_set_block (stmt
, n
? *n
: id
->block
);
3044 if (gimple_debug_nonbind_marker_p (stmt
))
3047 /* Remap all the operands in COPY. */
3048 memset (&wi
, 0, sizeof (wi
));
3051 processing_debug_stmt
= 1;
3053 if (gimple_debug_source_bind_p (stmt
))
3054 t
= gimple_debug_source_bind_get_var (stmt
);
3055 else if (gimple_debug_bind_p (stmt
))
3056 t
= gimple_debug_bind_get_var (stmt
);
3060 if (TREE_CODE (t
) == PARM_DECL
&& id
->debug_map
3061 && (n
= id
->debug_map
->get (t
)))
3063 gcc_assert (VAR_P (*n
));
3066 else if (VAR_P (t
) && !is_global_var (t
) && !id
->decl_map
->get (t
))
3067 /* T is a non-localized variable. */;
3069 walk_tree (&t
, remap_gimple_op_r
, &wi
, NULL
);
3071 if (gimple_debug_bind_p (stmt
))
3073 gimple_debug_bind_set_var (stmt
, t
);
3075 if (gimple_debug_bind_has_value_p (stmt
))
3076 walk_tree (gimple_debug_bind_get_value_ptr (stmt
),
3077 remap_gimple_op_r
, &wi
, NULL
);
3079 /* Punt if any decl couldn't be remapped. */
3080 if (processing_debug_stmt
< 0)
3081 gimple_debug_bind_reset_value (stmt
);
3083 else if (gimple_debug_source_bind_p (stmt
))
3085 gimple_debug_source_bind_set_var (stmt
, t
);
3086 /* When inlining and source bind refers to one of the optimized
3087 away parameters, change the source bind into normal debug bind
3088 referring to the corresponding DEBUG_EXPR_DECL that should have
3089 been bound before the call stmt. */
3090 t
= gimple_debug_source_bind_get_value (stmt
);
3092 && TREE_CODE (t
) == PARM_DECL
3095 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (id
->src_fn
);
3097 if (debug_args
!= NULL
)
3099 for (i
= 0; i
< vec_safe_length (*debug_args
); i
+= 2)
3100 if ((**debug_args
)[i
] == DECL_ORIGIN (t
)
3101 && TREE_CODE ((**debug_args
)[i
+ 1]) == DEBUG_EXPR_DECL
)
3103 t
= (**debug_args
)[i
+ 1];
3104 stmt
->subcode
= GIMPLE_DEBUG_BIND
;
3105 gimple_debug_bind_set_value (stmt
, t
);
3110 if (gimple_debug_source_bind_p (stmt
))
3111 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt
),
3112 remap_gimple_op_r
, &wi
, NULL
);
3115 processing_debug_stmt
= 0;
3120 /* Process deferred debug stmts. In order to give values better odds
3121 of being successfully remapped, we delay the processing of debug
3122 stmts until all other stmts that might require remapping are
3126 copy_debug_stmts (copy_body_data
*id
)
3131 if (!id
->debug_stmts
.exists ())
3134 FOR_EACH_VEC_ELT (id
->debug_stmts
, i
, stmt
)
3135 copy_debug_stmt (stmt
, id
);
3137 id
->debug_stmts
.release ();
3140 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3141 another function. */
3144 copy_tree_body (copy_body_data
*id
)
3146 tree fndecl
= id
->src_fn
;
3147 tree body
= DECL_SAVED_TREE (fndecl
);
3149 walk_tree (&body
, copy_tree_body_r
, id
, NULL
);
3154 /* Make a copy of the body of FN so that it can be inserted inline in
3155 another function. */
3158 copy_body (copy_body_data
*id
,
3159 basic_block entry_block_map
, basic_block exit_block_map
,
3160 basic_block new_entry
)
3162 tree fndecl
= id
->src_fn
;
3165 /* If this body has a CFG, walk CFG and copy. */
3166 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl
)));
3167 body
= copy_cfg_body (id
, entry_block_map
, exit_block_map
,
3169 copy_debug_stmts (id
);
3174 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3175 defined in function FN, or of a data member thereof. */
3178 self_inlining_addr_expr (tree value
, tree fn
)
3182 if (TREE_CODE (value
) != ADDR_EXPR
)
3185 var
= get_base_address (TREE_OPERAND (value
, 0));
3187 return var
&& auto_var_in_fn_p (var
, fn
);
3190 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3191 lexical block and line number information from base_stmt, if given,
3192 or from the last stmt of the block otherwise. */
3195 insert_init_debug_bind (copy_body_data
*id
,
3196 basic_block bb
, tree var
, tree value
,
3200 gimple_stmt_iterator gsi
;
3203 if (!gimple_in_ssa_p (id
->src_cfun
))
3206 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
3209 tracked_var
= target_for_debug_bind (var
);
3215 gsi
= gsi_last_bb (bb
);
3216 if (!base_stmt
&& !gsi_end_p (gsi
))
3217 base_stmt
= gsi_stmt (gsi
);
3220 note
= gimple_build_debug_bind (tracked_var
, unshare_expr (value
), base_stmt
);
3224 if (!gsi_end_p (gsi
))
3225 gsi_insert_after (&gsi
, note
, GSI_SAME_STMT
);
3227 gsi_insert_before (&gsi
, note
, GSI_SAME_STMT
);
3234 insert_init_stmt (copy_body_data
*id
, basic_block bb
, gimple
*init_stmt
)
3236 /* If VAR represents a zero-sized variable, it's possible that the
3237 assignment statement may result in no gimple statements. */
3240 gimple_stmt_iterator si
= gsi_last_bb (bb
);
3242 /* We can end up with init statements that store to a non-register
3243 from a rhs with a conversion. Handle that here by forcing the
3244 rhs into a temporary. gimple_regimplify_operands is not
3245 prepared to do this for us. */
3246 if (!is_gimple_debug (init_stmt
)
3247 && !is_gimple_reg (gimple_assign_lhs (init_stmt
))
3248 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt
)))
3249 && gimple_assign_rhs_class (init_stmt
) == GIMPLE_UNARY_RHS
)
3251 tree rhs
= build1 (gimple_assign_rhs_code (init_stmt
),
3252 gimple_expr_type (init_stmt
),
3253 gimple_assign_rhs1 (init_stmt
));
3254 rhs
= force_gimple_operand_gsi (&si
, rhs
, true, NULL_TREE
, false,
3256 gimple_assign_set_rhs_code (init_stmt
, TREE_CODE (rhs
));
3257 gimple_assign_set_rhs1 (init_stmt
, rhs
);
3259 gsi_insert_after (&si
, init_stmt
, GSI_NEW_STMT
);
3260 gimple_regimplify_operands (init_stmt
, &si
);
3262 if (!is_gimple_debug (init_stmt
))
3264 tree def
= gimple_assign_lhs (init_stmt
);
3265 insert_init_debug_bind (id
, bb
, def
, def
, init_stmt
);
3270 /* Initialize parameter P with VALUE. If needed, produce init statement
3271 at the end of BB. When BB is NULL, we return init statement to be
3274 setup_one_parameter (copy_body_data
*id
, tree p
, tree value
, tree fn
,
3275 basic_block bb
, tree
*vars
)
3277 gimple
*init_stmt
= NULL
;
3280 tree def
= (gimple_in_ssa_p (cfun
)
3281 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3284 && value
!= error_mark_node
3285 && !useless_type_conversion_p (TREE_TYPE (p
), TREE_TYPE (value
)))
3287 /* If we can match up types by promotion/demotion do so. */
3288 if (fold_convertible_p (TREE_TYPE (p
), value
))
3289 rhs
= fold_convert (TREE_TYPE (p
), value
);
3292 /* ??? For valid programs we should not end up here.
3293 Still if we end up with truly mismatched types here, fall back
3294 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3295 GIMPLE to the following passes. */
3296 if (!is_gimple_reg_type (TREE_TYPE (value
))
3297 || TYPE_SIZE (TREE_TYPE (p
)) == TYPE_SIZE (TREE_TYPE (value
)))
3298 rhs
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (p
), value
);
3300 rhs
= build_zero_cst (TREE_TYPE (p
));
3304 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3305 here since the type of this decl must be visible to the calling
3307 var
= copy_decl_to_var (p
, id
);
3309 /* Declare this new variable. */
3310 DECL_CHAIN (var
) = *vars
;
3313 /* Make gimplifier happy about this variable. */
3314 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3316 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3317 we would not need to create a new variable here at all, if it
3318 weren't for debug info. Still, we can just use the argument
3320 if (TREE_READONLY (p
)
3321 && !TREE_ADDRESSABLE (p
)
3322 && value
&& !TREE_SIDE_EFFECTS (value
)
3325 /* We may produce non-gimple trees by adding NOPs or introduce
3326 invalid sharing when operand is not really constant.
3327 It is not big deal to prohibit constant propagation here as
3328 we will constant propagate in DOM1 pass anyway. */
3329 if (is_gimple_min_invariant (value
)
3330 && useless_type_conversion_p (TREE_TYPE (p
),
3332 /* We have to be very careful about ADDR_EXPR. Make sure
3333 the base variable isn't a local variable of the inlined
3334 function, e.g., when doing recursive inlining, direct or
3335 mutually-recursive or whatever, which is why we don't
3336 just test whether fn == current_function_decl. */
3337 && ! self_inlining_addr_expr (value
, fn
))
3339 insert_decl_map (id
, p
, value
);
3340 insert_debug_decl_map (id
, p
, var
);
3341 return insert_init_debug_bind (id
, bb
, var
, value
, NULL
);
3345 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3346 that way, when the PARM_DECL is encountered, it will be
3347 automatically replaced by the VAR_DECL. */
3348 insert_decl_map (id
, p
, var
);
3350 /* Even if P was TREE_READONLY, the new VAR should not be.
3351 In the original code, we would have constructed a
3352 temporary, and then the function body would have never
3353 changed the value of P. However, now, we will be
3354 constructing VAR directly. The constructor body may
3355 change its value multiple times as it is being
3356 constructed. Therefore, it must not be TREE_READONLY;
3357 the back-end assumes that TREE_READONLY variable is
3358 assigned to only once. */
3359 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p
)))
3360 TREE_READONLY (var
) = 0;
3362 /* If there is no setup required and we are in SSA, take the easy route
3363 replacing all SSA names representing the function parameter by the
3364 SSA name passed to function.
3366 We need to construct map for the variable anyway as it might be used
3367 in different SSA names when parameter is set in function.
3369 Do replacement at -O0 for const arguments replaced by constant.
3370 This is important for builtin_constant_p and other construct requiring
3371 constant argument to be visible in inlined function body. */
3372 if (gimple_in_ssa_p (cfun
) && rhs
&& def
&& is_gimple_reg (p
)
3374 || (TREE_READONLY (p
)
3375 && is_gimple_min_invariant (rhs
)))
3376 && (TREE_CODE (rhs
) == SSA_NAME
3377 || is_gimple_min_invariant (rhs
))
3378 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def
))
3380 insert_decl_map (id
, def
, rhs
);
3381 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3384 /* If the value of argument is never used, don't care about initializing
3386 if (optimize
&& gimple_in_ssa_p (cfun
) && !def
&& is_gimple_reg (p
))
3388 gcc_assert (!value
|| !TREE_SIDE_EFFECTS (value
));
3389 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3392 /* Initialize this VAR_DECL from the equivalent argument. Convert
3393 the argument to the proper type in case it was promoted. */
3396 if (rhs
== error_mark_node
)
3398 insert_decl_map (id
, p
, var
);
3399 return insert_init_debug_bind (id
, bb
, var
, rhs
, NULL
);
3402 STRIP_USELESS_TYPE_CONVERSION (rhs
);
3404 /* If we are in SSA form properly remap the default definition
3405 or assign to a dummy SSA name if the parameter is unused and
3406 we are not optimizing. */
3407 if (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
))
3411 def
= remap_ssa_name (def
, id
);
3412 init_stmt
= gimple_build_assign (def
, rhs
);
3413 SSA_NAME_IS_DEFAULT_DEF (def
) = 0;
3414 set_ssa_default_def (cfun
, var
, NULL
);
3418 def
= make_ssa_name (var
);
3419 init_stmt
= gimple_build_assign (def
, rhs
);
3423 init_stmt
= gimple_build_assign (var
, rhs
);
3425 if (bb
&& init_stmt
)
3426 insert_init_stmt (id
, bb
, init_stmt
);
3431 /* Generate code to initialize the parameters of the function at the
3432 top of the stack in ID from the GIMPLE_CALL STMT. */
3435 initialize_inlined_parameters (copy_body_data
*id
, gimple
*stmt
,
3436 tree fn
, basic_block bb
)
3441 tree vars
= NULL_TREE
;
3442 tree static_chain
= gimple_call_chain (stmt
);
3444 /* Figure out what the parameters are. */
3445 parms
= DECL_ARGUMENTS (fn
);
3447 /* Loop through the parameter declarations, replacing each with an
3448 equivalent VAR_DECL, appropriately initialized. */
3449 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3452 val
= i
< gimple_call_num_args (stmt
) ? gimple_call_arg (stmt
, i
) : NULL
;
3453 setup_one_parameter (id
, p
, val
, fn
, bb
, &vars
);
3455 /* After remapping parameters remap their types. This has to be done
3456 in a second loop over all parameters to appropriately remap
3457 variable sized arrays when the size is specified in a
3458 parameter following the array. */
3459 for (p
= parms
, i
= 0; p
; p
= DECL_CHAIN (p
), i
++)
3461 tree
*varp
= id
->decl_map
->get (p
);
3462 if (varp
&& VAR_P (*varp
))
3464 tree def
= (gimple_in_ssa_p (cfun
) && is_gimple_reg (p
)
3465 ? ssa_default_def (id
->src_cfun
, p
) : NULL
);
3467 TREE_TYPE (var
) = remap_type (TREE_TYPE (var
), id
);
3468 /* Also remap the default definition if it was remapped
3469 to the default definition of the parameter replacement
3470 by the parameter setup. */
3473 tree
*defp
= id
->decl_map
->get (def
);
3475 && TREE_CODE (*defp
) == SSA_NAME
3476 && SSA_NAME_VAR (*defp
) == var
)
3477 TREE_TYPE (*defp
) = TREE_TYPE (var
);
3482 /* Initialize the static chain. */
3483 p
= DECL_STRUCT_FUNCTION (fn
)->static_chain_decl
;
3484 gcc_assert (fn
!= current_function_decl
);
3487 /* No static chain? Seems like a bug in tree-nested.c. */
3488 gcc_assert (static_chain
);
3490 setup_one_parameter (id
, p
, static_chain
, fn
, bb
, &vars
);
3493 declare_inline_vars (id
->block
, vars
);
3497 /* Declare a return variable to replace the RESULT_DECL for the
3498 function we are calling. An appropriate DECL_STMT is returned.
3499 The USE_STMT is filled to contain a use of the declaration to
3500 indicate the return value of the function.
3502 RETURN_SLOT, if non-null is place where to store the result. It
3503 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3504 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3506 The return value is a (possibly null) value that holds the result
3507 as seen by the caller. */
3510 declare_return_variable (copy_body_data
*id
, tree return_slot
, tree modify_dest
,
3511 basic_block entry_bb
)
3513 tree callee
= id
->src_fn
;
3514 tree result
= DECL_RESULT (callee
);
3515 tree callee_type
= TREE_TYPE (result
);
3519 /* Handle type-mismatches in the function declaration return type
3520 vs. the call expression. */
3522 caller_type
= TREE_TYPE (modify_dest
);
3524 caller_type
= TREE_TYPE (TREE_TYPE (callee
));
3526 /* We don't need to do anything for functions that don't return anything. */
3527 if (VOID_TYPE_P (callee_type
))
3530 /* If there was a return slot, then the return value is the
3531 dereferenced address of that object. */
3534 /* The front end shouldn't have used both return_slot and
3535 a modify expression. */
3536 gcc_assert (!modify_dest
);
3537 if (DECL_BY_REFERENCE (result
))
3539 tree return_slot_addr
= build_fold_addr_expr (return_slot
);
3540 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr
);
3542 /* We are going to construct *&return_slot and we can't do that
3543 for variables believed to be not addressable.
3545 FIXME: This check possibly can match, because values returned
3546 via return slot optimization are not believed to have address
3547 taken by alias analysis. */
3548 gcc_assert (TREE_CODE (return_slot
) != SSA_NAME
);
3549 var
= return_slot_addr
;
3554 gcc_assert (TREE_CODE (var
) != SSA_NAME
);
3555 if (TREE_ADDRESSABLE (result
))
3556 mark_addressable (var
);
3558 if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3559 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3560 && !DECL_GIMPLE_REG_P (result
)
3562 DECL_GIMPLE_REG_P (var
) = 0;
3567 /* All types requiring non-trivial constructors should have been handled. */
3568 gcc_assert (!TREE_ADDRESSABLE (callee_type
));
3570 /* Attempt to avoid creating a new temporary variable. */
3572 && TREE_CODE (modify_dest
) != SSA_NAME
)
3574 bool use_it
= false;
3576 /* We can't use MODIFY_DEST if there's type promotion involved. */
3577 if (!useless_type_conversion_p (callee_type
, caller_type
))
3580 /* ??? If we're assigning to a variable sized type, then we must
3581 reuse the destination variable, because we've no good way to
3582 create variable sized temporaries at this point. */
3583 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type
)) != INTEGER_CST
)
3586 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3587 reuse it as the result of the call directly. Don't do this if
3588 it would promote MODIFY_DEST to addressable. */
3589 else if (TREE_ADDRESSABLE (result
))
3593 tree base_m
= get_base_address (modify_dest
);
3595 /* If the base isn't a decl, then it's a pointer, and we don't
3596 know where that's going to go. */
3597 if (!DECL_P (base_m
))
3599 else if (is_global_var (base_m
))
3601 else if ((TREE_CODE (TREE_TYPE (result
)) == COMPLEX_TYPE
3602 || TREE_CODE (TREE_TYPE (result
)) == VECTOR_TYPE
)
3603 && !DECL_GIMPLE_REG_P (result
)
3604 && DECL_GIMPLE_REG_P (base_m
))
3606 else if (!TREE_ADDRESSABLE (base_m
))
3618 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type
)) == INTEGER_CST
);
3620 var
= copy_result_decl_to_var (result
, id
);
3621 DECL_SEEN_IN_BIND_EXPR_P (var
) = 1;
3623 /* Do not have the rest of GCC warn about this variable as it should
3624 not be visible to the user. */
3625 TREE_NO_WARNING (var
) = 1;
3627 declare_inline_vars (id
->block
, var
);
3629 /* Build the use expr. If the return type of the function was
3630 promoted, convert it back to the expected type. */
3632 if (!useless_type_conversion_p (caller_type
, TREE_TYPE (var
)))
3634 /* If we can match up types by promotion/demotion do so. */
3635 if (fold_convertible_p (caller_type
, var
))
3636 use
= fold_convert (caller_type
, var
);
3639 /* ??? For valid programs we should not end up here.
3640 Still if we end up with truly mismatched types here, fall back
3641 to using a MEM_REF to not leak invalid GIMPLE to the following
3643 /* Prevent var from being written into SSA form. */
3644 if (TREE_CODE (TREE_TYPE (var
)) == VECTOR_TYPE
3645 || TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
)
3646 DECL_GIMPLE_REG_P (var
) = false;
3647 else if (is_gimple_reg_type (TREE_TYPE (var
)))
3648 TREE_ADDRESSABLE (var
) = true;
3649 use
= fold_build2 (MEM_REF
, caller_type
,
3650 build_fold_addr_expr (var
),
3651 build_int_cst (ptr_type_node
, 0));
3655 STRIP_USELESS_TYPE_CONVERSION (use
);
3657 if (DECL_BY_REFERENCE (result
))
3659 TREE_ADDRESSABLE (var
) = 1;
3660 var
= build_fold_addr_expr (var
);
3664 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3665 way, when the RESULT_DECL is encountered, it will be
3666 automatically replaced by the VAR_DECL.
3668 When returning by reference, ensure that RESULT_DECL remaps to
3670 if (DECL_BY_REFERENCE (result
)
3671 && !is_gimple_val (var
))
3673 tree temp
= create_tmp_var (TREE_TYPE (result
), "retvalptr");
3674 insert_decl_map (id
, result
, temp
);
3675 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3676 it's default_def SSA_NAME. */
3677 if (gimple_in_ssa_p (id
->src_cfun
)
3678 && is_gimple_reg (result
))
3680 temp
= make_ssa_name (temp
);
3681 insert_decl_map (id
, ssa_default_def (id
->src_cfun
, result
), temp
);
3683 insert_init_stmt (id
, entry_bb
, gimple_build_assign (temp
, var
));
3686 insert_decl_map (id
, result
, var
);
3688 /* Remember this so we can ignore it in remap_decls. */
3693 /* Determine if the function can be copied. If so return NULL. If
3694 not return a string describng the reason for failure. */
3697 copy_forbidden (struct function
*fun
)
3699 const char *reason
= fun
->cannot_be_copied_reason
;
3701 /* Only examine the function once. */
3702 if (fun
->cannot_be_copied_set
)
3705 /* We cannot copy a function that receives a non-local goto
3706 because we cannot remap the destination label used in the
3707 function that is performing the non-local goto. */
3708 /* ??? Actually, this should be possible, if we work at it.
3709 No doubt there's just a handful of places that simply
3710 assume it doesn't happen and don't substitute properly. */
3711 if (fun
->has_nonlocal_label
)
3713 reason
= G_("function %q+F can never be copied "
3714 "because it receives a non-local goto");
3718 if (fun
->has_forced_label_in_static
)
3720 reason
= G_("function %q+F can never be copied because it saves "
3721 "address of local label in a static variable");
3726 fun
->cannot_be_copied_reason
= reason
;
3727 fun
->cannot_be_copied_set
= true;
3732 static const char *inline_forbidden_reason
;
3734 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3735 iff a function cannot be inlined. Also sets the reason why. */
3738 inline_forbidden_p_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
3739 struct walk_stmt_info
*wip
)
3741 tree fn
= (tree
) wip
->info
;
3743 gimple
*stmt
= gsi_stmt (*gsi
);
3745 switch (gimple_code (stmt
))
3748 /* Refuse to inline alloca call unless user explicitly forced so as
3749 this may change program's memory overhead drastically when the
3750 function using alloca is called in loop. In GCC present in
3751 SPEC2000 inlining into schedule_block cause it to require 2GB of
3752 RAM instead of 256MB. Don't do so for alloca calls emitted for
3753 VLA objects as those can't cause unbounded growth (they're always
3754 wrapped inside stack_save/stack_restore regions. */
3755 if (gimple_maybe_alloca_call_p (stmt
)
3756 && !gimple_call_alloca_for_var_p (as_a
<gcall
*> (stmt
))
3757 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
)))
3759 inline_forbidden_reason
3760 = G_("function %q+F can never be inlined because it uses "
3761 "alloca (override using the always_inline attribute)");
3762 *handled_ops_p
= true;
3766 t
= gimple_call_fndecl (stmt
);
3770 /* We cannot inline functions that call setjmp. */
3771 if (setjmp_call_p (t
))
3773 inline_forbidden_reason
3774 = G_("function %q+F can never be inlined because it uses setjmp");
3775 *handled_ops_p
= true;
3779 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
)
3780 switch (DECL_FUNCTION_CODE (t
))
3782 /* We cannot inline functions that take a variable number of
3784 case BUILT_IN_VA_START
:
3785 case BUILT_IN_NEXT_ARG
:
3786 case BUILT_IN_VA_END
:
3787 inline_forbidden_reason
3788 = G_("function %q+F can never be inlined because it "
3789 "uses variable argument lists");
3790 *handled_ops_p
= true;
3793 case BUILT_IN_LONGJMP
:
3794 /* We can't inline functions that call __builtin_longjmp at
3795 all. The non-local goto machinery really requires the
3796 destination be in a different function. If we allow the
3797 function calling __builtin_longjmp to be inlined into the
3798 function calling __builtin_setjmp, Things will Go Awry. */
3799 inline_forbidden_reason
3800 = G_("function %q+F can never be inlined because "
3801 "it uses setjmp-longjmp exception handling");
3802 *handled_ops_p
= true;
3805 case BUILT_IN_NONLOCAL_GOTO
:
3807 inline_forbidden_reason
3808 = G_("function %q+F can never be inlined because "
3809 "it uses non-local goto");
3810 *handled_ops_p
= true;
3813 case BUILT_IN_RETURN
:
3814 case BUILT_IN_APPLY_ARGS
:
3815 /* If a __builtin_apply_args caller would be inlined,
3816 it would be saving arguments of the function it has
3817 been inlined into. Similarly __builtin_return would
3818 return from the function the inline has been inlined into. */
3819 inline_forbidden_reason
3820 = G_("function %q+F can never be inlined because "
3821 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3822 *handled_ops_p
= true;
3831 t
= gimple_goto_dest (stmt
);
3833 /* We will not inline a function which uses computed goto. The
3834 addresses of its local labels, which may be tucked into
3835 global storage, are of course not constant across
3836 instantiations, which causes unexpected behavior. */
3837 if (TREE_CODE (t
) != LABEL_DECL
)
3839 inline_forbidden_reason
3840 = G_("function %q+F can never be inlined "
3841 "because it contains a computed goto");
3842 *handled_ops_p
= true;
3851 *handled_ops_p
= false;
3855 /* Return true if FNDECL is a function that cannot be inlined into
3859 inline_forbidden_p (tree fndecl
)
3861 struct function
*fun
= DECL_STRUCT_FUNCTION (fndecl
);
3862 struct walk_stmt_info wi
;
3864 bool forbidden_p
= false;
3866 /* First check for shared reasons not to copy the code. */
3867 inline_forbidden_reason
= copy_forbidden (fun
);
3868 if (inline_forbidden_reason
!= NULL
)
3871 /* Next, walk the statements of the function looking for
3872 constraucts we can't handle, or are non-optimal for inlining. */
3873 hash_set
<tree
> visited_nodes
;
3874 memset (&wi
, 0, sizeof (wi
));
3875 wi
.info
= (void *) fndecl
;
3876 wi
.pset
= &visited_nodes
;
3878 FOR_EACH_BB_FN (bb
, fun
)
3881 gimple_seq seq
= bb_seq (bb
);
3882 ret
= walk_gimple_seq (seq
, inline_forbidden_p_stmt
, NULL
, &wi
);
3883 forbidden_p
= (ret
!= NULL
);
3891 /* Return false if the function FNDECL cannot be inlined on account of its
3892 attributes, true otherwise. */
3894 function_attribute_inlinable_p (const_tree fndecl
)
3896 if (targetm
.attribute_table
)
3900 for (a
= DECL_ATTRIBUTES (fndecl
); a
; a
= TREE_CHAIN (a
))
3902 const_tree name
= TREE_PURPOSE (a
);
3905 for (i
= 0; targetm
.attribute_table
[i
].name
!= NULL
; i
++)
3906 if (is_attribute_p (targetm
.attribute_table
[i
].name
, name
))
3907 return targetm
.function_attribute_inlinable_p (fndecl
);
3914 /* Returns nonzero if FN is a function that does not have any
3915 fundamental inline blocking properties. */
3918 tree_inlinable_function_p (tree fn
)
3920 bool inlinable
= true;
3924 /* If we've already decided this function shouldn't be inlined,
3925 there's no need to check again. */
3926 if (DECL_UNINLINABLE (fn
))
3929 /* We only warn for functions declared `inline' by the user. */
3930 do_warning
= (warn_inline
3931 && DECL_DECLARED_INLINE_P (fn
)
3932 && !DECL_NO_INLINE_WARNING_P (fn
)
3933 && !DECL_IN_SYSTEM_HEADER (fn
));
3935 always_inline
= lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn
));
3938 && always_inline
== NULL
)
3941 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3942 "is suppressed using %<-fno-inline%>", fn
);
3946 else if (!function_attribute_inlinable_p (fn
))
3949 warning (OPT_Winline
, "function %q+F can never be inlined because it "
3950 "uses attributes conflicting with inlining", fn
);
3954 else if (inline_forbidden_p (fn
))
3956 /* See if we should warn about uninlinable functions. Previously,
3957 some of these warnings would be issued while trying to expand
3958 the function inline, but that would cause multiple warnings
3959 about functions that would for example call alloca. But since
3960 this a property of the function, just one warning is enough.
3961 As a bonus we can now give more details about the reason why a
3962 function is not inlinable. */
3964 error (inline_forbidden_reason
, fn
);
3965 else if (do_warning
)
3966 warning (OPT_Winline
, inline_forbidden_reason
, fn
);
3971 /* Squirrel away the result so that we don't have to check again. */
3972 DECL_UNINLINABLE (fn
) = !inlinable
;
3977 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
3978 word size and take possible memcpy call into account and return
3979 cost based on whether optimizing for size or speed according to SPEED_P. */
3982 estimate_move_cost (tree type
, bool ARG_UNUSED (speed_p
))
3986 gcc_assert (!VOID_TYPE_P (type
));
3988 if (TREE_CODE (type
) == VECTOR_TYPE
)
3990 scalar_mode inner
= SCALAR_TYPE_MODE (TREE_TYPE (type
));
3991 machine_mode simd
= targetm
.vectorize
.preferred_simd_mode (inner
);
3993 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type
)));
3994 int simd_mode_size
= estimated_poly_value (GET_MODE_SIZE (simd
));
3995 return ((orig_mode_size
+ simd_mode_size
- 1)
3999 size
= int_size_in_bytes (type
);
4001 if (size
< 0 || size
> MOVE_MAX_PIECES
* MOVE_RATIO (speed_p
))
4002 /* Cost of a memcpy call, 3 arguments and the call. */
4005 return ((size
+ MOVE_MAX_PIECES
- 1) / MOVE_MAX_PIECES
);
4008 /* Returns cost of operation CODE, according to WEIGHTS */
4011 estimate_operator_cost (enum tree_code code
, eni_weights
*weights
,
4012 tree op1 ATTRIBUTE_UNUSED
, tree op2
)
4016 /* These are "free" conversions, or their presumed cost
4017 is folded into other operations. */
4022 case VIEW_CONVERT_EXPR
:
4025 /* Assign cost of 1 to usual operations.
4026 ??? We may consider mapping RTL costs to this. */
4032 case POINTER_PLUS_EXPR
:
4033 case POINTER_DIFF_EXPR
:
4036 case MULT_HIGHPART_EXPR
:
4038 case ADDR_SPACE_CONVERT_EXPR
:
4039 case FIXED_CONVERT_EXPR
:
4040 case FIX_TRUNC_EXPR
:
4059 case TRUTH_ANDIF_EXPR
:
4060 case TRUTH_ORIF_EXPR
:
4061 case TRUTH_AND_EXPR
:
4063 case TRUTH_XOR_EXPR
:
4064 case TRUTH_NOT_EXPR
:
4073 case UNORDERED_EXPR
:
4084 case PREDECREMENT_EXPR
:
4085 case PREINCREMENT_EXPR
:
4086 case POSTDECREMENT_EXPR
:
4087 case POSTINCREMENT_EXPR
:
4089 case REALIGN_LOAD_EXPR
:
4091 case WIDEN_SUM_EXPR
:
4092 case WIDEN_MULT_EXPR
:
4095 case WIDEN_MULT_PLUS_EXPR
:
4096 case WIDEN_MULT_MINUS_EXPR
:
4097 case WIDEN_LSHIFT_EXPR
:
4099 case VEC_WIDEN_MULT_HI_EXPR
:
4100 case VEC_WIDEN_MULT_LO_EXPR
:
4101 case VEC_WIDEN_MULT_EVEN_EXPR
:
4102 case VEC_WIDEN_MULT_ODD_EXPR
:
4103 case VEC_UNPACK_HI_EXPR
:
4104 case VEC_UNPACK_LO_EXPR
:
4105 case VEC_UNPACK_FLOAT_HI_EXPR
:
4106 case VEC_UNPACK_FLOAT_LO_EXPR
:
4107 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
4108 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
4109 case VEC_PACK_TRUNC_EXPR
:
4110 case VEC_PACK_SAT_EXPR
:
4111 case VEC_PACK_FIX_TRUNC_EXPR
:
4112 case VEC_PACK_FLOAT_EXPR
:
4113 case VEC_WIDEN_LSHIFT_HI_EXPR
:
4114 case VEC_WIDEN_LSHIFT_LO_EXPR
:
4115 case VEC_DUPLICATE_EXPR
:
4116 case VEC_SERIES_EXPR
:
4120 /* Few special cases of expensive operations. This is useful
4121 to avoid inlining on functions having too many of these. */
4122 case TRUNC_DIV_EXPR
:
4124 case FLOOR_DIV_EXPR
:
4125 case ROUND_DIV_EXPR
:
4126 case EXACT_DIV_EXPR
:
4127 case TRUNC_MOD_EXPR
:
4129 case FLOOR_MOD_EXPR
:
4130 case ROUND_MOD_EXPR
:
4132 if (TREE_CODE (op2
) != INTEGER_CST
)
4133 return weights
->div_mod_cost
;
4136 /* Bit-field insertion needs several shift and mask operations. */
4137 case BIT_INSERT_EXPR
:
4141 /* We expect a copy assignment with no operator. */
4142 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_SINGLE_RHS
);
4148 /* Estimate number of instructions that will be created by expanding
4149 the statements in the statement sequence STMTS.
4150 WEIGHTS contains weights attributed to various constructs. */
4153 estimate_num_insns_seq (gimple_seq stmts
, eni_weights
*weights
)
4156 gimple_stmt_iterator gsi
;
4159 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4160 cost
+= estimate_num_insns (gsi_stmt (gsi
), weights
);
4166 /* Estimate number of instructions that will be created by expanding STMT.
4167 WEIGHTS contains weights attributed to various constructs. */
4170 estimate_num_insns (gimple
*stmt
, eni_weights
*weights
)
4173 enum gimple_code code
= gimple_code (stmt
);
4180 /* Try to estimate the cost of assignments. We have three cases to
4182 1) Simple assignments to registers;
4183 2) Stores to things that must live in memory. This includes
4184 "normal" stores to scalars, but also assignments of large
4185 structures, or constructors of big arrays;
4187 Let us look at the first two cases, assuming we have "a = b + C":
4188 <GIMPLE_ASSIGN <var_decl "a">
4189 <plus_expr <var_decl "b"> <constant C>>
4190 If "a" is a GIMPLE register, the assignment to it is free on almost
4191 any target, because "a" usually ends up in a real register. Hence
4192 the only cost of this expression comes from the PLUS_EXPR, and we
4193 can ignore the GIMPLE_ASSIGN.
4194 If "a" is not a GIMPLE register, the assignment to "a" will most
4195 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4196 of moving something into "a", which we compute using the function
4197 estimate_move_cost. */
4198 if (gimple_clobber_p (stmt
))
4199 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4201 lhs
= gimple_assign_lhs (stmt
);
4202 rhs
= gimple_assign_rhs1 (stmt
);
4206 /* Account for the cost of moving to / from memory. */
4207 if (gimple_store_p (stmt
))
4208 cost
+= estimate_move_cost (TREE_TYPE (lhs
), weights
->time_based
);
4209 if (gimple_assign_load_p (stmt
))
4210 cost
+= estimate_move_cost (TREE_TYPE (rhs
), weights
->time_based
);
4212 cost
+= estimate_operator_cost (gimple_assign_rhs_code (stmt
), weights
,
4213 gimple_assign_rhs1 (stmt
),
4214 get_gimple_rhs_class (gimple_assign_rhs_code (stmt
))
4215 == GIMPLE_BINARY_RHS
4216 ? gimple_assign_rhs2 (stmt
) : NULL
);
4220 cost
= 1 + estimate_operator_cost (gimple_cond_code (stmt
), weights
,
4221 gimple_op (stmt
, 0),
4222 gimple_op (stmt
, 1));
4227 gswitch
*switch_stmt
= as_a
<gswitch
*> (stmt
);
4228 /* Take into account cost of the switch + guess 2 conditional jumps for
4231 TODO: once the switch expansion logic is sufficiently separated, we can
4232 do better job on estimating cost of the switch. */
4233 if (weights
->time_based
)
4234 cost
= floor_log2 (gimple_switch_num_labels (switch_stmt
)) * 2;
4236 cost
= gimple_switch_num_labels (switch_stmt
) * 2;
4244 if (gimple_call_internal_p (stmt
))
4246 else if ((decl
= gimple_call_fndecl (stmt
))
4247 && fndecl_built_in_p (decl
))
4249 /* Do not special case builtins where we see the body.
4250 This just confuse inliner. */
4251 struct cgraph_node
*node
;
4252 if (!(node
= cgraph_node::get (decl
))
4253 || node
->definition
)
4255 /* For buitins that are likely expanded to nothing or
4256 inlined do not account operand costs. */
4257 else if (is_simple_builtin (decl
))
4259 else if (is_inexpensive_builtin (decl
))
4260 return weights
->target_builtin_call_cost
;
4261 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4263 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4264 specialize the cheap expansion we do here.
4265 ??? This asks for a more general solution. */
4266 switch (DECL_FUNCTION_CODE (decl
))
4271 if (TREE_CODE (gimple_call_arg (stmt
, 1)) == REAL_CST
4273 (&TREE_REAL_CST (gimple_call_arg (stmt
, 1)),
4275 return estimate_operator_cost
4276 (MULT_EXPR
, weights
, gimple_call_arg (stmt
, 0),
4277 gimple_call_arg (stmt
, 0));
4286 cost
= decl
? weights
->call_cost
: weights
->indirect_call_cost
;
4287 if (gimple_call_lhs (stmt
))
4288 cost
+= estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt
)),
4289 weights
->time_based
);
4290 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4292 tree arg
= gimple_call_arg (stmt
, i
);
4293 cost
+= estimate_move_cost (TREE_TYPE (arg
),
4294 weights
->time_based
);
4300 return weights
->return_cost
;
4306 case GIMPLE_PREDICT
:
4312 int count
= asm_str_count (gimple_asm_string (as_a
<gasm
*> (stmt
)));
4313 /* 1000 means infinity. This avoids overflows later
4314 with very long asm statements. */
4317 /* If this asm is asm inline, count anything as minimum size. */
4318 if (gimple_asm_inline_p (as_a
<gasm
*> (stmt
)))
4319 count
= MIN (1, count
);
4320 return MAX (1, count
);
4324 /* This is either going to be an external function call with one
4325 argument, or two register copy statements plus a goto. */
4328 case GIMPLE_EH_DISPATCH
:
4329 /* ??? This is going to turn into a switch statement. Ideally
4330 we'd have a look at the eh region and estimate the number of
4335 return estimate_num_insns_seq (
4336 gimple_bind_body (as_a
<gbind
*> (stmt
)),
4339 case GIMPLE_EH_FILTER
:
4340 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt
), weights
);
4343 return estimate_num_insns_seq (gimple_catch_handler (
4344 as_a
<gcatch
*> (stmt
)),
4348 return (estimate_num_insns_seq (gimple_try_eval (stmt
), weights
)
4349 + estimate_num_insns_seq (gimple_try_cleanup (stmt
), weights
));
4351 /* OMP directives are generally very expensive. */
4353 case GIMPLE_OMP_RETURN
:
4354 case GIMPLE_OMP_SECTIONS_SWITCH
:
4355 case GIMPLE_OMP_ATOMIC_STORE
:
4356 case GIMPLE_OMP_CONTINUE
:
4357 /* ...except these, which are cheap. */
4360 case GIMPLE_OMP_ATOMIC_LOAD
:
4361 return weights
->omp_cost
;
4363 case GIMPLE_OMP_FOR
:
4364 return (weights
->omp_cost
4365 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
)
4366 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt
), weights
));
4368 case GIMPLE_OMP_PARALLEL
:
4369 case GIMPLE_OMP_TASK
:
4370 case GIMPLE_OMP_CRITICAL
:
4371 case GIMPLE_OMP_MASTER
:
4372 case GIMPLE_OMP_TASKGROUP
:
4373 case GIMPLE_OMP_ORDERED
:
4374 case GIMPLE_OMP_SCAN
:
4375 case GIMPLE_OMP_SECTION
:
4376 case GIMPLE_OMP_SECTIONS
:
4377 case GIMPLE_OMP_SINGLE
:
4378 case GIMPLE_OMP_TARGET
:
4379 case GIMPLE_OMP_TEAMS
:
4380 return (weights
->omp_cost
4381 + estimate_num_insns_seq (gimple_omp_body (stmt
), weights
));
4383 case GIMPLE_TRANSACTION
:
4384 return (weights
->tm_cost
4385 + estimate_num_insns_seq (gimple_transaction_body (
4386 as_a
<gtransaction
*> (stmt
)),
4396 /* Estimate number of instructions that will be created by expanding
4397 function FNDECL. WEIGHTS contains weights attributed to various
4401 estimate_num_insns_fn (tree fndecl
, eni_weights
*weights
)
4403 struct function
*my_function
= DECL_STRUCT_FUNCTION (fndecl
);
4404 gimple_stmt_iterator bsi
;
4408 gcc_assert (my_function
&& my_function
->cfg
);
4409 FOR_EACH_BB_FN (bb
, my_function
)
4411 for (bsi
= gsi_start_bb (bb
); !gsi_end_p (bsi
); gsi_next (&bsi
))
4412 n
+= estimate_num_insns (gsi_stmt (bsi
), weights
);
4419 /* Initializes weights used by estimate_num_insns. */
4422 init_inline_once (void)
4424 eni_size_weights
.call_cost
= 1;
4425 eni_size_weights
.indirect_call_cost
= 3;
4426 eni_size_weights
.target_builtin_call_cost
= 1;
4427 eni_size_weights
.div_mod_cost
= 1;
4428 eni_size_weights
.omp_cost
= 40;
4429 eni_size_weights
.tm_cost
= 10;
4430 eni_size_weights
.time_based
= false;
4431 eni_size_weights
.return_cost
= 1;
4433 /* Estimating time for call is difficult, since we have no idea what the
4434 called function does. In the current uses of eni_time_weights,
4435 underestimating the cost does less harm than overestimating it, so
4436 we choose a rather small value here. */
4437 eni_time_weights
.call_cost
= 10;
4438 eni_time_weights
.indirect_call_cost
= 15;
4439 eni_time_weights
.target_builtin_call_cost
= 1;
4440 eni_time_weights
.div_mod_cost
= 10;
4441 eni_time_weights
.omp_cost
= 40;
4442 eni_time_weights
.tm_cost
= 40;
4443 eni_time_weights
.time_based
= true;
4444 eni_time_weights
.return_cost
= 2;
4448 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4451 prepend_lexical_block (tree current_block
, tree new_block
)
4453 BLOCK_CHAIN (new_block
) = BLOCK_SUBBLOCKS (current_block
);
4454 BLOCK_SUBBLOCKS (current_block
) = new_block
;
4455 BLOCK_SUPERCONTEXT (new_block
) = current_block
;
4458 /* Add local variables from CALLEE to CALLER. */
4461 add_local_variables (struct function
*callee
, struct function
*caller
,
4467 FOR_EACH_LOCAL_DECL (callee
, ix
, var
)
4468 if (!can_be_nonlocal (var
, id
))
4470 tree new_var
= remap_decl (var
, id
);
4472 /* Remap debug-expressions. */
4474 && DECL_HAS_DEBUG_EXPR_P (var
)
4477 tree tem
= DECL_DEBUG_EXPR (var
);
4478 bool old_regimplify
= id
->regimplify
;
4479 id
->remapping_type_depth
++;
4480 walk_tree (&tem
, copy_tree_body_r
, id
, NULL
);
4481 id
->remapping_type_depth
--;
4482 id
->regimplify
= old_regimplify
;
4483 SET_DECL_DEBUG_EXPR (new_var
, tem
);
4484 DECL_HAS_DEBUG_EXPR_P (new_var
) = 1;
4486 add_local_decl (caller
, new_var
);
4490 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4491 have brought in or introduced any debug stmts for SRCVAR. */
4494 reset_debug_binding (copy_body_data
*id
, tree srcvar
, gimple_seq
*bindings
)
4496 tree
*remappedvarp
= id
->decl_map
->get (srcvar
);
4501 if (!VAR_P (*remappedvarp
))
4504 if (*remappedvarp
== id
->retvar
)
4507 tree tvar
= target_for_debug_bind (*remappedvarp
);
4511 gdebug
*stmt
= gimple_build_debug_bind (tvar
, NULL_TREE
,
4513 gimple_seq_add_stmt (bindings
, stmt
);
4516 /* For each inlined variable for which we may have debug bind stmts,
4517 add before GSI a final debug stmt resetting it, marking the end of
4518 its life, so that var-tracking knows it doesn't have to compute
4519 further locations for it. */
4522 reset_debug_bindings (copy_body_data
*id
, gimple_stmt_iterator gsi
)
4526 gimple_seq bindings
= NULL
;
4528 if (!gimple_in_ssa_p (id
->src_cfun
))
4531 if (!opt_for_fn (id
->dst_fn
, flag_var_tracking_assignments
))
4534 for (var
= DECL_ARGUMENTS (id
->src_fn
);
4535 var
; var
= DECL_CHAIN (var
))
4536 reset_debug_binding (id
, var
, &bindings
);
4538 FOR_EACH_LOCAL_DECL (id
->src_cfun
, ix
, var
)
4539 reset_debug_binding (id
, var
, &bindings
);
4541 gsi_insert_seq_before_without_update (&gsi
, bindings
, GSI_SAME_STMT
);
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.
   Returns true if the call was successfully inlined into BB of the
   caller described by ID.  */

static bool
expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
{
  tree use_retvar;
  tree fn;
  hash_map<tree, tree> *dst;
  hash_map<tree, tree> *st = NULL;
  tree return_slot;
  tree modify_dest;
  struct cgraph_edge *cg_edge;
  cgraph_inline_failed_t reason;
  basic_block return_block;
  edge e;
  gimple_stmt_iterator gsi, stmt_gsi;
  bool successfully_inlined = false;
  bool purge_dead_abnormal_edges;
  gcall *call_stmt;
  unsigned int prop_mask, src_properties;
  struct function *dst_cfun;
  tree simduid;
  use_operand_p use;
  gimple *simtenter_stmt = NULL;
  vec<tree> *simtvars_save;

  /* The gimplifier uses input_location in too many places, such as
     internal_get_tmp_var ().  */
  location_t saved_location = input_location;
  input_location = gimple_location (stmt);

  /* From here on, we're only interested in CALL_EXPRs.  */
  call_stmt = dyn_cast <gcall *> (stmt);
  if (!call_stmt)
    goto egress;

  cg_edge = id->dst_node->get_edge (stmt);
  gcc_checking_assert (cg_edge);
  /* First, see if we can figure out what function is being called.
     If we cannot, then there is no hope of inlining the function.  */
  if (cg_edge->indirect_unknown_callee)
    goto egress;
  fn = cg_edge->callee->decl;
  gcc_checking_assert (fn);

  /* If FN is a declaration of a function in a nested scope that was
     globally declared inline, we don't set its DECL_INITIAL.
     However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
     C++ front-end uses it for cdtors to refer to their internal
     declarations, that are not real functions.  Fortunately those
     don't have trees to be saved, so we can tell by checking their
     gimple body.  */
  if (!DECL_INITIAL (fn)
      && DECL_ABSTRACT_ORIGIN (fn)
      && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
    fn = DECL_ABSTRACT_ORIGIN (fn);

  /* Don't try to inline functions that are not well-suited to inlining.  */
  if (cg_edge->inline_failed)
    {
      reason = cg_edge->inline_failed;
      /* If this call was originally indirect, we do not want to emit any
	 inlining related warnings or sorry messages because there are no
	 guarantees regarding those.  */
      if (cg_edge->indirect_inlining_edge)
	goto egress;

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
	  /* For extern inline functions that get redefined we always
	     silently ignored always_inline flag. Better behavior would
	     be to be able to keep both bodies and use extern inline body
	     for inlining, but we can't do that because frontends overwrite
	     the body.  */
	  && !cg_edge->callee->local.redefined_extern_inline
	  /* During early inline pass, report only when optimization is
	     not turned on.  */
	  && (symtab->global_info_ready
	      || !optimize
	      || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
	  /* PR 20090218-1_0.c. Body can be provided by another module. */
	  && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
	{
	  error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
		 cgraph_inline_failed_string (reason));
	  if (gimple_location (stmt) != UNKNOWN_LOCATION)
	    inform (gimple_location (stmt), "called from here");
	  else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
	    inform (DECL_SOURCE_LOCATION (cfun->decl),
		    "called from this function");
	}
      else if (warn_inline
	       && DECL_DECLARED_INLINE_P (fn)
	       && !DECL_NO_INLINE_WARNING_P (fn)
	       && !DECL_IN_SYSTEM_HEADER (fn)
	       && reason != CIF_UNSPECIFIED
	       && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
	       /* Do not warn about not inlined recursive calls.  */
	       && !cg_edge->recursive_p ()
	       /* Avoid warnings during early inline pass. */
	       && symtab->global_info_ready)
	{
	  auto_diagnostic_group d;
	  if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
		       fn, _(cgraph_inline_failed_string (reason))))
	    {
	      if (gimple_location (stmt) != UNKNOWN_LOCATION)
		inform (gimple_location (stmt), "called from here");
	      else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
		inform (DECL_SOURCE_LOCATION (cfun->decl),
			"called from this function");
	    }
	}
      goto egress;
    }
  id->src_node = cg_edge->callee;

  /* If callee is thunk, all we need is to adjust the THIS pointer
     and redirect to function being thunked.  */
  if (id->src_node->thunk.thunk_p)
    {
      cgraph_edge *edge;
      tree virtual_offset = NULL;
      profile_count count = cg_edge->count;
      tree op;
      gimple_stmt_iterator iter = gsi_for_stmt (stmt);

      cg_edge->remove ();
      edge = id->src_node->callees->clone (id->dst_node, call_stmt,
					   gimple_uid (stmt),
					   profile_count::one (),
					   profile_count::one (),
					   true);
      edge->count = count;
      if (id->src_node->thunk.virtual_offset_p)
	virtual_offset = size_int (id->src_node->thunk.virtual_value);
      op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
			      NULL);
      gsi_insert_before (&iter, gimple_build_assign (op,
						     gimple_call_arg (stmt, 0)),
			 GSI_NEW_STMT);
      gcc_assert (id->src_node->thunk.this_adjusting);
      op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
			 virtual_offset, id->src_node->thunk.indirect_offset);

      gimple_call_set_arg (stmt, 0, op);
      gimple_call_set_fndecl (stmt, edge->callee->decl);
      update_stmt (stmt);
      id->src_node->remove ();
      /* Recurse: now that the call targets the thunked-to function,
	 try to inline that instead.  */
      expand_call_inline (bb, stmt, id);
      maybe_remove_unused_call_args (cfun, stmt);
      return true;
    }
  fn = cg_edge->callee->decl;
  cg_edge->callee->get_untransformed_body ();

  if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
    cg_edge->callee->verify ();

  /* We will be inlining this callee.  */
  id->eh_lp_nr = lookup_stmt_eh_lp (stmt);

  /* Update the callers EH personality.  */
  if (DECL_FUNCTION_PERSONALITY (fn))
    DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
      = DECL_FUNCTION_PERSONALITY (fn);

  /* Split the block before the GIMPLE_CALL.  */
  stmt_gsi = gsi_for_stmt (stmt);
  gsi_prev (&stmt_gsi);
  e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
  bb = e->src;
  return_block = e->dest;
  remove_edge (e);

  /* If the GIMPLE_CALL was in the last statement of BB, it may have
     been the source of abnormal edges.  In this case, schedule
     the removal of dead abnormal edges.  */
  gsi = gsi_start_bb (return_block);
  gsi_next (&gsi);
  purge_dead_abnormal_edges = gsi_end_p (gsi);

  stmt_gsi = gsi_start_bb (return_block);

  /* Build a block containing code to initialize the arguments, the
     actual inline expansion of the body, and a label for the return
     statements within the function to jump to.  The type of the
     statement expression is the return type of the function call.
     ???  If the call does not have an associated block then we will
     remap all callee blocks to NULL, effectively dropping most of
     its debug information.  This should only happen for calls to
     artificial decls inserted by the compiler itself.  We need to
     either link the inlined blocks into the caller block tree or
     not refer to them in any way to not break GC for locations.  */
  if (tree block = gimple_block (stmt))
    {
      /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
	 to make inlined_function_outer_scope_p return true on this BLOCK.  */
      location_t loc = LOCATION_LOCUS (gimple_location (stmt));
      if (loc == UNKNOWN_LOCATION)
	loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
      if (loc == UNKNOWN_LOCATION)
	loc = BUILTINS_LOCATION;
      id->block = make_node (BLOCK);
      BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
      BLOCK_SOURCE_LOCATION (id->block) = loc;
      prepend_lexical_block (block, id->block);
    }

  /* Local declarations will be replaced by their equivalents in this map.  */
  st = id->decl_map;
  id->decl_map = new hash_map<tree, tree>;
  dst = id->debug_map;
  id->debug_map = NULL;
  if (flag_stack_reuse != SR_NONE)
    id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);

  /* Record the function we are about to inline.  */
  id->src_fn = fn;
  id->src_cfun = DECL_STRUCT_FUNCTION (fn);
  id->reset_location = DECL_IGNORED_P (fn);
  id->call_stmt = call_stmt;

  /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
     variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
  dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
  simtvars_save = id->dst_simt_vars;
  if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
      && (simduid = bb->loop_father->simduid) != NULL_TREE
      && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
      && single_imm_use (simduid, &use, &simtenter_stmt)
      && is_gimple_call (simtenter_stmt)
      && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
    vec_alloc (id->dst_simt_vars, 0);
  else
    id->dst_simt_vars = NULL;

  if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
    profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;

  /* If the src function contains an IFN_VA_ARG, then so will the dst
     function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
  prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
  src_properties = id->src_cfun->curr_properties & prop_mask;
  if (src_properties != prop_mask)
    dst_cfun->curr_properties &= src_properties | ~prop_mask;
  dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;

  gcc_assert (!id->src_cfun->after_inlining);

  id->entry_bb = bb;
  /* Calls to functions marked "cold" are predicted not taken.  */
  if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
						   NOT_TAKEN),
			GSI_NEW_STMT);
    }
  initialize_inlined_parameters (id, stmt, fn, bb);
  if (debug_nonbind_markers_p && debug_inline_points && id->block
      && inlined_function_outer_scope_p (id->block))
    {
      gimple_stmt_iterator si = gsi_last_bb (bb);
      gsi_insert_after (&si, gimple_build_debug_inline_entry
			(id->block, DECL_SOURCE_LOCATION (id->src_fn)),
			GSI_NEW_STMT);
    }

  if (DECL_INITIAL (fn))
    {
      if (gimple_block (stmt))
	{
	  tree *var;

	  prepend_lexical_block (id->block,
				 remap_blocks (DECL_INITIAL (fn), id));
	  gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
			       && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
				   == NULL_TREE));
	  /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
	     otherwise for DWARF DW_TAG_formal_parameter will not be children of
	     DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
	     under it.  The parameters can be then evaluated in the debugger,
	     but don't show in backtraces.  */
	  for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
	    if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
	      {
		tree v = *var;
		*var = TREE_CHAIN (v);
		TREE_CHAIN (v) = BLOCK_VARS (id->block);
		BLOCK_VARS (id->block) = v;
	      }
	    else
	      var = &TREE_CHAIN (*var);
	}
      else
	remap_blocks_to_null (DECL_INITIAL (fn), id);
    }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* The function which we are inlining might not return a value,
	 in which case we should issue a warning that the function
	 does not return a value.  In that case the optimizers will
	 see that the variable to which the value is assigned was not
	 initialized.  We do not want to issue a warning about that
	 uninitialized variable.  */
      if (DECL_P (modify_dest))
	TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (call_stmt))
	{
	  return_slot = modify_dest;
	  modify_dest = NULL;
	}
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);

  /* Add local vars in this inlined callee to caller.  */
  add_local_variables (id->src_cfun, cfun, id);

  if (dump_enabled_p ())
    {
      char buf[128];
      snprintf (buf, sizeof(buf), "%4.2f",
		cg_edge->sreal_frequency ().to_double ());
      dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
		       call_stmt,
		       "Inlining %C to %C with frequency %s\n",
		       id->src_node, id->dst_node, buf);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  id->src_node->dump (dump_file);
	  id->dst_node->dump (dump_file);
	}
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, bb, return_block, NULL);

  reset_debug_bindings (id, stmt_gsi);

  /* Clobber non-register parameter copies at the end of their life, so
     stack slots can be reused.  */
  if (flag_stack_reuse != SR_NONE)
    for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
      if (!TREE_THIS_VOLATILE (p))
	{
	  tree *varp = id->decl_map->get (p);
	  if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
	    {
	      tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (*varp, clobber);
	      gimple_set_location (clobber_stmt, gimple_location (stmt));
	      gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	    }
	}

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    pt_solution_reset (&cfun->gimple_df->escaped);

  /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
  if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
    {
      size_t nargs = gimple_call_num_args (simtenter_stmt);
      vec<tree> *vars = id->dst_simt_vars;
      auto_vec<tree> newargs (nargs + vars->length ());
      for (size_t i = 0; i < nargs; i++)
	newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
      for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
	{
	  tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
	  newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
	}
      gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
      gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
      gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
      gsi_replace (&gsi, g, false);
    }
  vec_free (id->dst_simt_vars);
  id->dst_simt_vars = simtvars_save;

  /* Clean up.  */
  if (id->debug_map)
    {
      delete id->debug_map;
      id->debug_map = dst;
    }
  delete id->decl_map;
  id->decl_map = st;

  /* Unlink the calls virtual operands before replacing it.  */
  unlink_stmt_vdef (stmt);
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    release_ssa_name (gimple_vdef (stmt));

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple *old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gimple_set_location (stmt, gimple_location (old_stmt));
      gsi_replace (&stmt_gsi, stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
      /* Append a clobber for id->retvar if easily possible.  */
      if (flag_stack_reuse != SR_NONE
	  && id->retvar
	  && VAR_P (id->retvar)
	  && id->retvar != return_slot
	  && id->retvar != modify_dest
	  && !TREE_THIS_VOLATILE (id->retvar)
	  && !is_gimple_reg (id->retvar)
	  && !stmt_ends_bb_p (stmt))
	{
	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (old_stmt));
	  gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
	}
    }
  else
    {
      /* Handle the case of inlining a function with no return
	 statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
	  && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
	{
	  tree name = gimple_call_lhs (stmt);
	  tree var = SSA_NAME_VAR (name);
	  tree def = var ? ssa_default_def (cfun, var) : NULL;

	  if (def)
	    {
	      /* If the variable is used undefined, make this name
		 undefined via a move.  */
	      stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
	      gsi_replace (&stmt_gsi, stmt, true);
	    }
	  else
	    {
	      if (!var)
		{
		  var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
		  SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
		}
	      /* Otherwise make this variable undefined.  */
	      gsi_remove (&stmt_gsi, true);
	      set_ssa_default_def (cfun, var, name);
	      SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
	    }
	}
      /* Replace with a clobber for id->retvar.  */
      else if (flag_stack_reuse != SR_NONE
	       && id->retvar
	       && VAR_P (id->retvar)
	       && id->retvar != return_slot
	       && id->retvar != modify_dest
	       && !TREE_THIS_VOLATILE (id->retvar)
	       && !is_gimple_reg (id->retvar))
	{
	  tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
	  gimple *clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (id->retvar, clobber);
	  gimple_set_location (clobber_stmt, gimple_location (stmt));
	  gsi_replace (&stmt_gsi, clobber_stmt, false);
	  maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
	}
      else
	gsi_remove (&stmt_gsi, true);
    }

  if (purge_dead_abnormal_edges)
    {
      gimple_purge_dead_eh_edges (return_block);
      gimple_purge_dead_abnormal_call_edges (return_block);
    }

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
		  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  id->add_clobbers_to_eh_landing_pads = 0;

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  if (gimple_block (stmt))
    (*debug_hooks->outlining_inline_function) (fn);

  /* Update callgraph if needed.  */
  cg_edge->callee->remove ();

  id->block = NULL_TREE;
  id->retvar = NULL_TREE;
  successfully_inlined = true;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
/* Expand call statements reachable from STMT_P.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;
  bool inlined = false;

  /* Walk backwards so that inlining a call (which splits BB) does not
     invalidate the iteration over the remaining statements.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      gsi_prev (&gsi);

      /* Internal function calls have no fndecl and can never be inlined.  */
      if (is_gimple_call (stmt)
	  && !gimple_call_internal_p (stmt))
	inlined |= expand_call_inline (bb, stmt, id);
    }

  return inlined;
}
/* Walk all basic blocks created after FIRST and try to fold every statement
   in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  for (; first < last_basic_block_for_fn (cfun); first++)
    if (BASIC_BLOCK_FOR_FN (cfun, first))
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
	     !gsi_end_p (gsi);
	     gsi_next (&gsi))
	  if (statements->contains (gsi_stmt (gsi)))
	    {
	      gimple *old_stmt = gsi_stmt (gsi);
	      tree old_decl
		= is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

	      if (old_decl && fndecl_built_in_p (old_decl))
		{
		  /* Folding builtins can create multiple instructions,
		     we need to look at all of them.  */
		  gimple_stmt_iterator i2 = gsi;
		  gsi_prev (&i2);
		  if (fold_stmt (&gsi))
		    {
		      gimple *new_stmt;
		      /* If a builtin at the end of a bb folded into nothing,
			 the following loop won't work.  */
		      if (gsi_end_p (gsi))
			{
			  cgraph_update_edges_for_call_stmt (old_stmt,
							     old_decl, NULL);
			  break;
			}
		      if (gsi_end_p (i2))
			i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
		      else
			gsi_next (&i2);
		      while (1)
			{
			  new_stmt = gsi_stmt (i2);
			  update_stmt (new_stmt);
			  cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
							     new_stmt);

			  if (new_stmt == gsi_stmt (gsi))
			    {
			      /* It is okay to check only for the very last
				 of these statements.  If it is a throwing
				 statement nothing will change.  If it isn't
				 this can remove EH edges.  If that weren't
				 correct then because some intermediate stmts
				 throw, but not the last one.  That would mean
				 we'd have to split the block, which we can't
				 here and we'd loose anyway.  And as builtins
				 probably never throw, this all
				 is mood anyway.  */
			      if (maybe_clean_or_replace_eh_stmt (old_stmt,
								  new_stmt))
				gimple_purge_dead_eh_edges (
				  BASIC_BLOCK_FOR_FN (cfun, first));
			      break;
			    }
			  gsi_next (&i2);
			}
		    }
		}
	      else if (fold_stmt (&gsi))
		{
		  /* Re-read the statement from GSI as fold_stmt() may
		     have changed it.  */
		  gimple *new_stmt = gsi_stmt (gsi);
		  update_stmt (new_stmt);

		  if (is_gimple_call (old_stmt)
		      || is_gimple_call (new_stmt))
		    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
						       new_stmt);

		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
		    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
								    first));
		}
	    }
      }
}
/* Expand calls to inline functions in the body of FN.  Returns a mask
   of TODO_* flags for the caller pass manager.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }

  /* Fold queued statements.  */
  update_max_bb_count ();
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  gcc_assert (!id.debug_stmts.exists ());

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (id.dst_node, false);

  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet - the IPA passes might make various functions to not
     throw and they don't care to proactively update local EH info.  This is
     done later in fixup_cfg pass that also execute the verification.  */
  return (TODO_update_ssa
	  | TODO_cleanup_cfg
	  | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
	  | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
	  | (profile_status_for_fn (cfun) != PROFILE_ABSENT
	     ? TODO_rebuild_frequencies : 0));
}
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
	chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
	  || code == TREE_LIST
	  || code == OMP_CLAUSE)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
	 we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);
      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  FN is
   the function into which the copy will be placed.  */

static void
remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
{
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = st->get (*tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      st->put (*tp, t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      st->put (t, t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the splay_tree in DATA (which
   is really a 'copy_body_data *'.  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
			bool *handled_ops_p ATTRIBUTE_UNUSED,
			struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));

  if (stmt)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}
5410 static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq
,
5411 struct walk_stmt_info
*wi
);
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements in gimple
   operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* For recursive invocations this is no longer the LHS itself.  */
  bool is_lhs = wi->is_lhs;
  wi->is_lhs = false;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      if (is_lhs)
	SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
    }
  /* Only a local declaration (variable or label).  */
  else if ((VAR_P (expr) && !TREE_STATIC (expr))
	   || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  */
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
	   || TREE_CODE (expr) == BIND_EXPR
	   || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
	{
	  TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
	  TREE_OPERAND (expr, 3) = NULL_TREE;
	}
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
	 sequences that are neither copied by gimple_seq_copy nor walked by
	 walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work even
	 in those situations, we have to copy and process them explicitely.  */

      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
	{
	  gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
	{
	  gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
	}
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
	{
	  gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
	  seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
	  seq = duplicate_remap_omp_clause_seq (seq, wi);
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
   Using the splay_tree pointed to by ST (which is really a `splay_tree'),
   remaps all local declarations to appropriate replacements in gimple
   statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
		     bool *handled_ops_p ATTRIBUTE_UNUSED,
		     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      if (block)
	{
	  remap_block (&block, id);
	  gimple_bind_set_block (stmt, block);
	}

      /* This will remap a lot of the same decls again, but this should be
	 harmless.  */
      if (gimple_bind_vars (stmt))
	{
	  tree old_var, decls = gimple_bind_vars (stmt);

	  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
	    if (!can_be_nonlocal (old_var, id)
		&& ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
	      remap_decl (old_var, id);

	  gcc_checking_assert (!id->prevent_decl_creation_for_types);
	  id->prevent_decl_creation_for_types = true;
	  gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
	  id->prevent_decl_creation_for_types = false;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Create a copy of SEQ and remap all decls in it.  */

static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
{
  if (!seq)
    return NULL;

  /* If there are any labels in OMP sequences, they can be only referred to in
     the sequence itself and therefore we can do both here.  */
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
  gimple_seq copy = gimple_seq_copy (seq);
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
  return copy;
}
/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  hash_set<tree> visited;
  wi.info = &id;
  wi.pset = &visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  if (id.dependence_map)
    {
      delete id.dependence_map;
      id.dependence_map = NULL;
    }

  return copy;
}
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  /* walk_tree callback: returning non-NULL stops the walk, so the walk
     terminates as soon as the node passed in DATA is found.  */
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}
5631 debug_find_tree (tree top
, tree search
)
5633 return walk_tree_without_duplicates (&top
, debug_find_tree_1
, search
) != 0;
5637 /* Declare the variables created by the inliner. Add all the variables in
5638 VARS to BIND_EXPR. */
5641 declare_inline_vars (tree block
, tree vars
)
5644 for (t
= vars
; t
; t
= DECL_CHAIN (t
))
5646 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
5647 gcc_assert (!TREE_STATIC (t
) && !TREE_ASM_WRITTEN (t
));
5648 add_local_decl (cfun
, t
);
5652 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), vars
);
/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
   VAR_DECL translation.  Returns COPY after fixing up its flags and
   context.  */

tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);
  /* For vector typed decls make sure to update DECL_MODE according
     to the new function context.  */
  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  Note the deliberately empty
     statements in the first three arms: those decls keep their original
     context.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    {
      /* Ordinary automatic local variables are now in the scope of the
	 new function.  */
      DECL_CONTEXT (copy) = id->dst_fn;
      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
	{
	  /* Mark non-register locals as SIMT-private and record them so
	     the OMP lowering can privatize them per SIMT lane.  */
	  if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
	    DECL_ATTRIBUTES (copy)
	      = tree_cons (get_identifier ("omp simt private"), NULL,
			   DECL_ATTRIBUTES (copy));
	  id->dst_simt_vars->safe_push (copy);
	}
    }

  return copy;
}
5714 copy_decl_to_var (tree decl
, copy_body_data
*id
)
5718 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5719 || TREE_CODE (decl
) == RESULT_DECL
);
5721 type
= TREE_TYPE (decl
);
5723 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5724 VAR_DECL
, DECL_NAME (decl
), type
);
5725 if (DECL_PT_UID_SET_P (decl
))
5726 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5727 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5728 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5729 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5730 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5732 return copy_decl_for_dup_finish (id
, decl
, copy
);
5735 /* Like copy_decl_to_var, but create a return slot object instead of a
5736 pointer variable for return by invisible reference. */
5739 copy_result_decl_to_var (tree decl
, copy_body_data
*id
)
5743 gcc_assert (TREE_CODE (decl
) == PARM_DECL
5744 || TREE_CODE (decl
) == RESULT_DECL
);
5746 type
= TREE_TYPE (decl
);
5747 if (DECL_BY_REFERENCE (decl
))
5748 type
= TREE_TYPE (type
);
5750 copy
= build_decl (DECL_SOURCE_LOCATION (id
->dst_fn
),
5751 VAR_DECL
, DECL_NAME (decl
), type
);
5752 if (DECL_PT_UID_SET_P (decl
))
5753 SET_DECL_PT_UID (copy
, DECL_PT_UID (decl
));
5754 TREE_READONLY (copy
) = TREE_READONLY (decl
);
5755 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (decl
);
5756 if (!DECL_BY_REFERENCE (decl
))
5758 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (decl
);
5759 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (decl
);
5762 return copy_decl_for_dup_finish (id
, decl
, copy
);
5766 copy_decl_no_change (tree decl
, copy_body_data
*id
)
5770 copy
= copy_node (decl
);
5772 /* The COPY is not abstract; it will be generated in DST_FN. */
5773 DECL_ABSTRACT_P (copy
) = false;
5774 lang_hooks
.dup_lang_specific_decl (copy
);
5776 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5777 been taken; it's for internal bookkeeping in expand_goto_internal. */
5778 if (TREE_CODE (copy
) == LABEL_DECL
)
5780 TREE_ADDRESSABLE (copy
) = 0;
5781 LABEL_DECL_UID (copy
) = -1;
5784 return copy_decl_for_dup_finish (id
, decl
, copy
);
5788 copy_decl_maybe_to_var (tree decl
, copy_body_data
*id
)
5790 if (TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == RESULT_DECL
)
5791 return copy_decl_to_var (decl
, id
);
5793 return copy_decl_no_change (decl
, id
);
/* Return a copy of the function's argument tree.  ARGS_TO_SKIP, when
   non-NULL, is a bitmap of parameter indices to drop from the copy;
   a dropped argument instead becomes a local VAR_DECL chained onto
   *VARS.  */

static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
			       bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  /* PARG always points at the DECL_CHAIN slot where the next kept
     parameter must be spliced in.  */
  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
	tree new_tree = remap_decl (arg, id);
	if (TREE_CODE (new_tree) != PARM_DECL)
	  new_tree = id->copy_decl (arg, id);
	lang_hooks.dup_lang_specific_decl (new_tree);
	*parg = new_tree;
	parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
	/* Make an equivalent VAR_DECL.  If the argument was used
	   as temporary variable later in function, the uses will be
	   replaced by local variable.  */
	tree var = copy_decl_to_var (arg, id);
	insert_decl_map (id, arg, var);
	/* Declare this new variable.  */
	DECL_CHAIN (var) = *vars;
	*vars = var;
      }
  return new_parm;
}
5831 /* Return a copy of the function's static chain. */
5833 copy_static_chain (tree static_chain
, copy_body_data
* id
)
5835 tree
*chain_copy
, *pvar
;
5837 chain_copy
= &static_chain
;
5838 for (pvar
= chain_copy
; *pvar
; pvar
= &DECL_CHAIN (*pvar
))
5840 tree new_tree
= remap_decl (*pvar
, id
);
5841 lang_hooks
.dup_lang_specific_decl (new_tree
);
5842 DECL_CHAIN (new_tree
) = DECL_CHAIN (*pvar
);
5845 return static_chain
;
5848 /* Return true if the function is allowed to be versioned.
5849 This is a guard for the versioning functionality. */
5852 tree_versionable_function_p (tree fndecl
)
5854 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl
))
5855 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl
)) == NULL
);
/* Update clone info after duplication.  Walks every clone of
   ID->dst_node (depth-first over the clones/next_sibling_clone links)
   and remaps each clone's replace-map trees into the new body.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
	{
	  unsigned int i;
	  for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
	    {
	      struct ipa_replace_map *replace_info;
	      replace_info = (*node->clone.tree_map)[i];
	      walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
	      walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
	    }
	}

      /* Advance depth-first: descend into a clone's own clones first,
	 then move to the next sibling, otherwise climb back up until a
	 sibling is found or we return to the root.  */
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != id->dst_node && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != id->dst_node)
	    node = node->next_sibling_clone;
	}
    }
}
/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
   of the original function and the new copied function
   respectively.  In case we want to replace a DECL
   tree with another tree while duplicating the function's
   body, TREE_MAP represents the mapping between these
   trees. If UPDATE_CLONES is set, the call_stmt fields
   of edges of clones of the function will be updated.

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.
*/
void
tree_function_versioning (tree old_decl, tree new_decl,
			  vec<ipa_replace_map *, va_gc> *tree_map,
			  bool update_clones, bitmap args_to_skip,
			  bool skip_return, bitmap blocks_to_copy,
			  basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;
  bitmap debug_args_to_skip = args_to_skip;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
	      && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
	{
	  new_debug_args = decl_debug_args_insert (new_decl);
	  *new_debug_args = vec_safe_copy (*old_debug_args);
	}
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version. */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
		   new_entry ? new_entry->count : old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
	gimple *init;
	replace_info = (*tree_map)[i];
	if (replace_info->replace_p)
	  {
	    /* PARM_NUM records which parameter index the replacement
	       targets, for clearing the corresponding debug-args bit
	       below; -1 means not yet known.  */
	    int parm_num = -1;
	    if (!replace_info->old_tree)
	      {
		int p = replace_info->parm_num;
		tree parm;
		tree req_type, new_type;

		for (parm = DECL_ARGUMENTS (old_decl); p;
		     parm = DECL_CHAIN (parm))
		  p--;
		replace_info->old_tree = parm;
		parm_num = replace_info->parm_num;
		req_type = TREE_TYPE (parm);
		new_type = TREE_TYPE (replace_info->new_tree);
		if (!useless_type_conversion_p (req_type, new_type))
		  {
		    if (fold_convertible_p (req_type, replace_info->new_tree))
		      replace_info->new_tree
			= fold_build1 (NOP_EXPR, req_type,
				       replace_info->new_tree);
		    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
		      replace_info->new_tree
			= fold_build1 (VIEW_CONVERT_EXPR, req_type,
				       replace_info->new_tree);
		    else
		      {
			/* The replacement value cannot be converted to the
			   parameter's type; drop this substitution.  */
			if (dump_file)
			  {
			    fprintf (dump_file, "    const ");
			    print_generic_expr (dump_file,
						replace_info->new_tree);
			    fprintf (dump_file,
				     "  can't be converted to param ");
			    print_generic_expr (dump_file, parm);
			    fprintf (dump_file, "\n");
			  }
			replace_info->old_tree = NULL;
		      }
		  }
	      }
	    else
	      gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
	    if (replace_info->old_tree)
	      {
		init = setup_one_parameter (&id, replace_info->old_tree,
					    replace_info->new_tree, id.src_fn,
					    NULL,
					    &vars);
		if (init)
		  init_stmts.safe_push (init);
		if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
		  {
		    if (parm_num == -1)
		      {
			tree parm;
			int p;
			for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
			     parm = DECL_CHAIN (parm), p++)
			  if (parm == replace_info->old_tree)
			    {
			      parm_num = p;
			      break;
			    }
		      }
		    if (parm_num != -1)
		      {
			/* A replaced parameter has a known value, so keep it
			   in the debug args rather than marking it skipped.
			   Copy the bitmap lazily, on first modification.  */
			if (debug_args_to_skip == args_to_skip)
			  {
			    debug_args_to_skip = BITMAP_ALLOC (NULL);
			    bitmap_copy (debug_args_to_skip, args_to_skip);
			  }
			bitmap_clear_bit (debug_args_to_skip, parm_num);
		      }
		  }
	      }
	  }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
				       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      /* The clone returns void: give it a fresh void RESULT_DECL.  */
      DECL_RESULT (new_decl)
	= build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
		      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
	  && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
	  && (old_name = ssa_default_def (id.src_cfun,
					  DECL_RESULT (old_decl))))
	{
	  tree new_name = make_ssa_name (DECL_RESULT (new_decl));
	  insert_decl_map (&id, old_name, new_name);
	  SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
	  set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
	}
    }

  /* Set up the destination functions loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the Function's body.  */
  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
	     new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  update_max_bb_count ();
  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
	{
	  basic_block bb = gimple_bb (e->call_stmt);
	  e->count = bb->count;
	}
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
	   parm; parm = DECL_CHAIN (parm), i++)
	if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
	  {
	    tree ddecl;

	    if (debug_args == NULL)
	      {
		debug_args = decl_debug_args_insert (new_decl);
		len = vec_safe_length (*debug_args);
	      }
	    ddecl = make_node (DEBUG_EXPR_DECL);
	    DECL_ARTIFICIAL (ddecl) = 1;
	    TREE_TYPE (ddecl) = TREE_TYPE (parm);
	    SET_DECL_MODE (ddecl, DECL_MODE (parm));
	    vec_safe_push (*debug_args, DECL_ORIGIN (parm));
	    vec_safe_push (*debug_args, ddecl);
	  }
      if (debug_args != NULL)
	{
	  /* On the callee side, add
	     DEBUG D#Y s=> parm
	     DEBUG var => D#Y
	     stmts to the first bb where var is a VAR_DECL created for the
	     optimized away parameter in DECL_INITIAL block.  This hints
	     in the debug info that var (whole DECL_ORIGIN is the parm
	     PARM_DECL) is optimized away, but could be looked up at the
	     call site as value of D#X there.  */
	  tree var = vars, vexpr;
	  gimple_stmt_iterator cgsi
	    = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gimple *def_temp;

	  i = vec_safe_length (*debug_args);
	  do
	    {
	      /* Entries were pushed in (origin, debug decl) pairs; walk
		 them from the end back to the first newly added pair.  */
	      i -= 2;
	      while (var != NULL_TREE
		     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
		var = TREE_CHAIN (var);
	      if (var == NULL_TREE)
		break;
	      vexpr = make_node (DEBUG_EXPR_DECL);
	      parm = (**debug_args)[i];
	      DECL_ARTIFICIAL (vexpr) = 1;
	      TREE_TYPE (vexpr) = TREE_TYPE (parm);
	      SET_DECL_MODE (vexpr, DECL_MODE (parm));
	      def_temp = gimple_build_debug_bind (var, vexpr, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	      def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
	      gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
	    }
	  while (i > len);
	}
    }

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}
/* EXP is CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  Returns NULL_TREE
   when the call cannot be inlined here.  */

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
	   param;
	   param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
	decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
	 since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
	 expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
	return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}
/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  /* Remap within the current function; only the decl_map and copy_decl
     hook are needed for a type copy.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  /* The duplicate is a distinct type, so it is its own canonical type.  */
  TYPE_CANONICAL (type) = type;

  return type;
}
6352 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6353 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6357 copy_fn (tree fn
, tree
& parms
, tree
& result
)
6361 hash_map
<tree
, tree
> decl_map
;
6366 memset (&id
, 0, sizeof (id
));
6368 id
.dst_fn
= current_function_decl
;
6369 id
.src_cfun
= DECL_STRUCT_FUNCTION (fn
);
6370 id
.decl_map
= &decl_map
;
6372 id
.copy_decl
= copy_decl_no_change
;
6373 id
.transform_call_graph_edges
= CB_CGE_DUPLICATE
;
6374 id
.transform_new_cfg
= false;
6375 id
.transform_return_to_modify
= false;
6376 id
.transform_parameter
= true;
6377 id
.transform_lang_insert_block
= NULL
;
6379 /* Make sure not to unshare trees behind the front-end's back
6380 since front-end specific mechanisms may rely on sharing. */
6381 id
.regimplify
= false;
6382 id
.do_not_unshare
= true;
6383 id
.do_not_fold
= true;
6385 /* We're not inside any EH region. */
6388 /* Remap the parameters and result and return them to the caller. */
6389 for (param
= DECL_ARGUMENTS (fn
);
6391 param
= DECL_CHAIN (param
))
6393 *p
= remap_decl (param
, &id
);
6394 p
= &DECL_CHAIN (*p
);
6397 if (DECL_RESULT (fn
))
6398 result
= remap_decl (DECL_RESULT (fn
), &id
);
6402 return copy_tree_body (&id
);