/* Tree inlining.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-predict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfganal.h"
#include "tree-iterator.h"
#include "intl.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "debug.h"
#include "value-prof.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-chkp.h"


/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated, but the result is a new
   function, rather than blocks inlined into an existing function as
   with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated, resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

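/* For illustration, a hypothetical source-level sketch of the inlining
   transformation described above (the mangled names are invented, not
   what GCC actually prints): given

     int callee (int p) { return p + 1; }
     int caller (void) { return callee (41); }

   the inliner duplicates the body of callee, remaps the PARM_DECL `p'
   to a fresh local VAR_DECL, and turns the RETURN_EXPR into an
   assignment to a dedicated return variable, roughly

     int caller (void)
     {
       int p.1 = 41;
       int retval.2;
       retval.2 = p.1 + 1;
       return retval.2;
     }  */
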
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple *);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, the map is used for more than
   that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}

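/* A minimal usage sketch of the map discipline above (the identifiers
   are assumed for illustration):

     insert_decl_map (id, old_parm, new_var);
     gcc_checking_assert (*id->decl_map->get (old_parm) == new_var);
     gcc_checking_assert (*id->decl_map->get (new_var) == new_var);

   The second, identity entry is what prevents NEW_VAR from being
   duplicated again if the walk encounters it a second time.  */
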
/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as statement is not copied yet.  We do that
     in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might've substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by variable during
     inlining: this saves us from the need to introduce a PHI node when
     the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function having an uninitialized variable, we
	     might extend its lifetime (the variable might get reused).
	     This causes an ICE when we end up extending the lifetime of
	     an SSA name across an abnormal edge, and also increases
	     register pressure.

	     We simply initialize all uninitialized vars by 0, except
	     when we are inlining to the very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected
	     regions of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple *init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}

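/* A hedged illustration of the default-def handling above: when the
   callee uses a variable uninitialized and its SSA name occurs in an
   abnormal PHI, the code inserts an explicit zero initialization, so
   the copied body conceptually gains

     x_2 = 0;

   at the end of ID->entry_bb (`x_2' being the hypothetical remapped
   name), instead of extending the lifetime of an undefined default
   definition across the abnormal edge.  */
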
/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}

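/* Note on the ordering above, with a hypothetical example: remapping
   is mutually recursive.  For a local `struct s' with a variably sized
   member, remap_decl calls remap_type, which may call remap_decl again
   on TYPE_STUB_DECL; the early insert_decl_map call lets that inner
   lookup find the partially initialized copy instead of recursing
   forever.  */
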
static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure
     to preserve sharing between the type and its main variant when
     possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type)
			       == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type)
			       == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree)
	    = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree)
	    = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      gcc_checking_assert (TYPE_SIZE (type)
			   == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
      gcc_checking_assert (TYPE_SIZE_UNIT (type)
			   == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));

      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}

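/* A sketch of when remapping triggers, assuming C99 source:

     void f (int n) { typedef int vec[n]; vec a; use (&a); }

   `vec' is variably modified (its size depends on the PARM_DECL `n'),
   so inlining f must build a remapped type whose size expression
   refers to the caller's copy of `n'.  A fixed type such as `int[4]'
   is not variably modified and is mapped to itself above.  */
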
/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}

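/* For example, in

     int counter (void) { static int n; return ++n; }

   the static `n' is not an auto variable of the source function, so
   can_be_nonlocal returns true and every inlined copy of counter ()
   keeps referring to the one original `n' rather than a duplicate.  */
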
static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple *new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}

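/* A small sketch of the clique remapping: if the source body used
   dependence cliques 1 and 2 and the destination's cfun->last_clique
   was 5 on entry, the memoizing map above yields the stable renaming
   1 -> 6 and 2 -> 7, translating each source clique exactly once.  */
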
/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ??? The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, *tp);
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

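/* A hedged example of the MEM_REF re-canonicalization above: if the
   caller passes `&v' for pointer parameter `p', remapping leaves a
   reference like MEM[&v, 0] in the copy; rebuilding it via fold_build2
   re-canonicalizes the access (where possible, into a direct reference
   to `v') while the flag copying preserves volatility, no-warning,
   dependence-clique and, guardedly, no-trap information.  */
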

/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, *tp);
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ??? We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has block defined, map it to newly constructed block.
	 When inlining we want EXPRs without block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

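/* A hedged example of the INDIRECT_REF cleanup in copy_tree_body_r:
   inlining `int f (int *p) { return *p; }' at the call `f (&x)'
   substitutes `&x' for `p', momentarily producing `*&x'; the
   gimple_fold_indirect_ref path above reduces that to plain `x'
   instead of leaving a pointless dereference in the copied body.  */
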
/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}

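/* A small sketch, assuming the callee's EH region 2 was duplicated
   into the caller as region 7: remap_eh_region_nr (2, id) returns 7,
   and remap_eh_region_tree_nr rewrites the INTEGER_CST argument of
   calls such as __builtin_eh_pointer (2) to __builtin_eh_pointer (7).  */
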
1305/* Helper for copy_bb. Remap statement STMT using the inlining
1306 information in ID. Return the new statement copy. */
1307
d5e254e1 1308static gimple_seq
355fe088 1309remap_gimple_stmt (gimple *stmt, copy_body_data *id)
726a989a 1310{
355fe088 1311 gimple *copy = NULL;
726a989a 1312 struct walk_stmt_info wi;
5a6e26b7 1313 bool skip_first = false;
d5e254e1 1314 gimple_seq stmts = NULL;
726a989a 1315
683750ce
RB
1316 if (is_gimple_debug (stmt)
1317 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1318 return stmts;
1319
726a989a
RB
1320 /* Begin by recognizing trees that we'll completely rewrite for the
1321 inlining context. Our output for these trees is completely
1322 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1323 into an edge). Further down, we'll handle trees that get
1324 duplicated and/or tweaked. */
1325
1326 /* When requested, GIMPLE_RETURNs should be transformed to just the
1327 contained GIMPLE_ASSIGN. The branch semantics of the return will
1328 be handled elsewhere by manipulating the CFG rather than the
1329 statement. */
1330 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1331 {
538dd0b7 1332 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
d5e254e1
IE
1333 tree retbnd = gimple_return_retbnd (stmt);
1334 tree bndslot = id->retbnd;
1335
1336 if (retbnd && bndslot)
1337 {
355fe088 1338 gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
d5e254e1
IE
1339 memset (&wi, 0, sizeof (wi));
1340 wi.info = id;
1341 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1342 gimple_seq_add_stmt (&stmts, bndcopy);
1343 }
726a989a
RB
1344
1345 /* If we're returning something, just turn that into an
1346 assignment into the equivalent of the original RESULT_DECL.
1347 If RETVAL is just the result decl, the result decl has
1348 already been set (e.g. a recent "foo (&result_decl, ...)");
1349 just toss the entire GIMPLE_RETURN. */
6938f93f
JH
1350 if (retval
1351 && (TREE_CODE (retval) != RESULT_DECL
1352 && (TREE_CODE (retval) != SSA_NAME
70b5e7dc 1353 || ! SSA_NAME_VAR (retval)
6938f93f 1354 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
5a6e26b7 1355 {
f8cb36a9
JJ
1356 copy = gimple_build_assign (id->do_not_unshare
1357 ? id->retvar : unshare_expr (id->retvar),
1358 retval);
5a6e26b7
JH
1359 /* id->retvar is already substituted. Skip it on later remapping. */
1360 skip_first = true;
d5e254e1
IE
1361
1362 /* We need to copy bounds if return structure with pointers into
1363 instrumented function. */
1364 if (chkp_function_instrumented_p (id->dst_fn)
1365 && !bndslot
1366 && !BOUNDED_P (id->retvar)
1367 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1368 id->assign_stmts.safe_push (copy);
1369
5a6e26b7 1370 }
726a989a 1371 else
d5e254e1 1372 return stmts;
726a989a
RB
1373 }
1374 else if (gimple_has_substatements (stmt))
1375 {
1376 gimple_seq s1, s2;
1377
1378 /* When cloning bodies from the C++ front end, we will be handed bodies
1379 in High GIMPLE form. Handle here all the High GIMPLE statements that
1380 have embedded statements. */
1381 switch (gimple_code (stmt))
1382 {
1383 case GIMPLE_BIND:
538dd0b7 1384 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
726a989a
RB
1385 break;
1386
1387 case GIMPLE_CATCH:
538dd0b7
DM
1388 {
1389 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1390 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1391 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1392 }
726a989a
RB
1393 break;
1394
1395 case GIMPLE_EH_FILTER:
1396 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1397 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1398 break;
1399
1400 case GIMPLE_TRY:
1401 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1402 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
b8698a0f 1403 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
726a989a
RB
1404 break;
1405
1406 case GIMPLE_WITH_CLEANUP_EXPR:
1407 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1408 copy = gimple_build_wce (s1);
1409 break;
1410
1411 case GIMPLE_OMP_PARALLEL:
538dd0b7
DM
1412 {
1413 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1414 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1415 copy = gimple_build_omp_parallel
1416 (s1,
1417 gimple_omp_parallel_clauses (omp_par_stmt),
1418 gimple_omp_parallel_child_fn (omp_par_stmt),
1419 gimple_omp_parallel_data_arg (omp_par_stmt));
1420 }
726a989a
RB
1421 break;
1422
1423 case GIMPLE_OMP_TASK:
1424 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1425 copy = gimple_build_omp_task
1426 (s1,
1427 gimple_omp_task_clauses (stmt),
1428 gimple_omp_task_child_fn (stmt),
1429 gimple_omp_task_data_arg (stmt),
1430 gimple_omp_task_copy_fn (stmt),
1431 gimple_omp_task_arg_size (stmt),
1432 gimple_omp_task_arg_align (stmt));
1433 break;
1434
1435 case GIMPLE_OMP_FOR:
1436 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1437 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
74bf76ed
JJ
1438 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1439 gimple_omp_for_clauses (stmt),
726a989a
RB
1440 gimple_omp_for_collapse (stmt), s2);
1441 {
1442 size_t i;
1443 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1444 {
1445 gimple_omp_for_set_index (copy, i,
1446 gimple_omp_for_index (stmt, i));
1447 gimple_omp_for_set_initial (copy, i,
1448 gimple_omp_for_initial (stmt, i));
1449 gimple_omp_for_set_final (copy, i,
1450 gimple_omp_for_final (stmt, i));
1451 gimple_omp_for_set_incr (copy, i,
1452 gimple_omp_for_incr (stmt, i));
1453 gimple_omp_for_set_cond (copy, i,
1454 gimple_omp_for_cond (stmt, i));
1455 }
1456 }
1457 break;
1458
1459 case GIMPLE_OMP_MASTER:
1460 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1461 copy = gimple_build_omp_master (s1);
1462 break;
1463
acf0174b
JJ
1464 case GIMPLE_OMP_TASKGROUP:
1465 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1466 copy = gimple_build_omp_taskgroup (s1);
1467 break;
1468
726a989a
RB
1469 case GIMPLE_OMP_ORDERED:
1470 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
d9a6bd32
JJ
1471 copy = gimple_build_omp_ordered
1472 (s1,
1473 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
726a989a
RB
1474 break;
1475
1476 case GIMPLE_OMP_SECTION:
1477 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1478 copy = gimple_build_omp_section (s1);
1479 break;
1480
1481 case GIMPLE_OMP_SECTIONS:
1482 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1483 copy = gimple_build_omp_sections
1484 (s1, gimple_omp_sections_clauses (stmt));
1485 break;
1486
1487 case GIMPLE_OMP_SINGLE:
1488 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1489 copy = gimple_build_omp_single
1490 (s1, gimple_omp_single_clauses (stmt));
1491 break;
1492
acf0174b
JJ
1493 case GIMPLE_OMP_TARGET:
1494 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1495 copy = gimple_build_omp_target
1496 (s1, gimple_omp_target_kind (stmt),
1497 gimple_omp_target_clauses (stmt));
1498 break;
1499
1500 case GIMPLE_OMP_TEAMS:
1501 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1502 copy = gimple_build_omp_teams
1503 (s1, gimple_omp_teams_clauses (stmt));
1504 break;
1505
05a26161
JJ
1506 case GIMPLE_OMP_CRITICAL:
1507 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
538dd0b7 1508 copy = gimple_build_omp_critical (s1,
d9a6bd32
JJ
1509 gimple_omp_critical_name
1510 (as_a <gomp_critical *> (stmt)),
1511 gimple_omp_critical_clauses
1512 (as_a <gomp_critical *> (stmt)));
05a26161
JJ
1513 break;
1514
0a35513e 1515 case GIMPLE_TRANSACTION:
538dd0b7
DM
1516 {
1517 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1518 gtransaction *new_trans_stmt;
1519 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1520 id);
7c11b0fe
RH
1521 copy = new_trans_stmt = gimple_build_transaction (s1);
1522 gimple_transaction_set_subcode (new_trans_stmt,
538dd0b7 1523 gimple_transaction_subcode (old_trans_stmt));
7c11b0fe
RH
1524 gimple_transaction_set_label_norm (new_trans_stmt,
1525 gimple_transaction_label_norm (old_trans_stmt));
1526 gimple_transaction_set_label_uninst (new_trans_stmt,
1527 gimple_transaction_label_uninst (old_trans_stmt));
1528 gimple_transaction_set_label_over (new_trans_stmt,
1529 gimple_transaction_label_over (old_trans_stmt));
538dd0b7 1530 }
0a35513e
AH
1531 break;
1532
726a989a
RB
1533 default:
1534 gcc_unreachable ();
1535 }
1536 }
1537 else
1538 {
1539 if (gimple_assign_copy_p (stmt)
1540 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1541 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1542 {
1543 /* Here we handle statements that are not completely rewritten.
1544 First we detect some inlining-induced bogosities for
1545 discarding. */
1546
1547 /* Some assignments VAR = VAR; don't generate any rtl code
1548 and thus don't count as variable modification. Avoid
1549 keeping bogosities like 0 = 0. */
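 /* Illustrative sketch (not from the original source): if the callee
 contained "tmp = tmp;" and tmp was remapped to the constant 0, the
 copy would otherwise survive as the useless statement "0 = 0"; the
 map lookup below detects that situation and drops the statement. */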
1550 tree decl = gimple_assign_lhs (stmt), value;
1551 tree *n;
1552
b787e7a2 1553 n = id->decl_map->get (decl);
1554 if (n)
1555 {
1556 value = *n;
1557 STRIP_TYPE_NOPS (value);
1558 if (TREE_CONSTANT (value) || TREE_READONLY (value))
d5e254e1 1559 return NULL;
1560 }
1561 }
1562
1563 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1564 in a block that we aren't copying during tree_function_versioning,
1565 just drop the clobber stmt. */
1566 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1567 {
1568 tree lhs = gimple_assign_lhs (stmt);
1569 if (TREE_CODE (lhs) == MEM_REF
1570 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1571 {
355fe088 1572 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1573 if (gimple_bb (def_stmt)
1574 && !bitmap_bit_p (id->blocks_to_copy,
1575 gimple_bb (def_stmt)->index))
d5e254e1 1576 return NULL;
1577 }
1578 }
1579
1580 if (gimple_debug_bind_p (stmt))
1581 {
1582 gdebug *copy
1583 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1584 gimple_debug_bind_get_value (stmt),
1585 stmt);
9771b263 1586 id->debug_stmts.safe_push (copy);
1587 gimple_seq_add_stmt (&stmts, copy);
1588 return stmts;
b5b8b0ac 1589 }
1590 if (gimple_debug_source_bind_p (stmt))
1591 {
1592 gdebug *copy = gimple_build_debug_source_bind
1593 (gimple_debug_source_bind_get_var (stmt),
1594 gimple_debug_source_bind_get_value (stmt),
1595 stmt);
9771b263 1596 id->debug_stmts.safe_push (copy);
1597 gimple_seq_add_stmt (&stmts, copy);
1598 return stmts;
ddb555ed 1599 }
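 /* Sketch of what is queued above (assumed GIMPLE dump syntax): a bind
 such as "# DEBUG x => a_1 + 1" is copied verbatim and pushed onto
 id->debug_stmts; its operands are remapped only later, in
 copy_debug_stmts, once all non-debug statements have been copied. */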
1600
1601 /* Create a new deep copy of the statement. */
1602 copy = gimple_copy (stmt);
1603
6b77934e 1604 /* Clear flags that need revisiting. */
538dd0b7 1605 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1606 {
1607 if (gimple_call_tail_p (call_stmt))
1608 gimple_call_set_tail (call_stmt, false);
1609 if (gimple_call_from_thunk_p (call_stmt))
1610 gimple_call_set_from_thunk (call_stmt, false);
1611 if (gimple_call_internal_p (call_stmt)
1612 && IN_RANGE (gimple_call_internal_fn (call_stmt),
1613 IFN_GOMP_SIMD_ORDERED_START,
1614 IFN_GOMP_SIMD_ORDERED_END))
1615 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
2ddeb89b 1616 }
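 /* Rationale, for example: a call that was in tail position in the
 callee is generally no longer a tail call once its body is spliced
 into the middle of the caller, so the flag is conservatively
 dropped and recomputed by later passes. */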
6b77934e 1617
1618 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1619 RESX and EH_DISPATCH. */
1620 if (id->eh_map)
1621 switch (gimple_code (copy))
1622 {
1623 case GIMPLE_CALL:
1624 {
1625 tree r, fndecl = gimple_call_fndecl (copy);
1626 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1627 switch (DECL_FUNCTION_CODE (fndecl))
1628 {
1629 case BUILT_IN_EH_COPY_VALUES:
1630 r = gimple_call_arg (copy, 1);
1631 r = remap_eh_region_tree_nr (r, id);
1632 gimple_call_set_arg (copy, 1, r);
1633 /* FALLTHRU */
1634
1635 case BUILT_IN_EH_POINTER:
1636 case BUILT_IN_EH_FILTER:
1637 r = gimple_call_arg (copy, 0);
1638 r = remap_eh_region_tree_nr (r, id);
1639 gimple_call_set_arg (copy, 0, r);
1640 break;
1641
1642 default:
1643 break;
1644 }
d086d311 1645
1646 /* Reset alias info if we didn't apply measures to
1647 keep it valid over inlining by setting DECL_PT_UID. */
1648 if (!id->src_cfun->gimple_df
1649 || !id->src_cfun->gimple_df->ipa_pta)
538dd0b7 1650 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1651 }
1652 break;
1653
1654 case GIMPLE_RESX:
1655 {
1656 gresx *resx_stmt = as_a <gresx *> (copy);
1657 int r = gimple_resx_region (resx_stmt);
1d65f45c 1658 r = remap_eh_region_nr (r, id);
538dd0b7 1659 gimple_resx_set_region (resx_stmt, r);
1660 }
1661 break;
1662
1663 case GIMPLE_EH_DISPATCH:
1664 {
1665 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1666 int r = gimple_eh_dispatch_region (eh_dispatch);
1d65f45c 1667 r = remap_eh_region_nr (r, id);
538dd0b7 1668 gimple_eh_dispatch_set_region (eh_dispatch, r);
1669 }
1670 break;
1671
1672 default:
1673 break;
1674 }
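 /* Hypothetical illustration of the remapping above: if the callee
 used EH region 1 and duplicate_eh_regions assigned the copy region
 number 4 in the caller, a copied __builtin_eh_pointer (1) becomes
 __builtin_eh_pointer (4). */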
1675 }
1676
1677 /* If STMT has a block defined, map it to the newly constructed
16917761 1678 block. */
1679 if (gimple_block (copy))
1680 {
1681 tree *n;
b787e7a2 1682 n = id->decl_map->get (gimple_block (copy));
726a989a 1683 gcc_assert (n);
16917761 1684 gimple_set_block (copy, *n);
1685 }
1686
ddb555ed 1687 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1688 {
1689 gimple_seq_add_stmt (&stmts, copy);
1690 return stmts;
1691 }
b5b8b0ac 1692
1693 /* Remap all the operands in COPY. */
1694 memset (&wi, 0, sizeof (wi));
1695 wi.info = id;
1696 if (skip_first)
1697 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1698 else
b8698a0f 1699 walk_gimple_op (copy, remap_gimple_op_r, &wi);
726a989a 1700
1701 /* Clear the copied virtual operands. We are not remapping them here
1702 but are going to recreate them from scratch. */
1703 if (gimple_has_mem_ops (copy))
1704 {
1705 gimple_set_vdef (copy, NULL_TREE);
1706 gimple_set_vuse (copy, NULL_TREE);
1707 }
1708
1709 gimple_seq_add_stmt (&stmts, copy);
1710 return stmts;
1711}
1712
1713
1714/* Copy basic block, scale profile accordingly. Edges will be taken care of
1715 later. */
1716
1717static basic_block
1718copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1719 gcov_type count_scale)
e21aff8a 1720{
c2a4718a 1721 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
e21aff8a 1722 basic_block copy_basic_block;
726a989a 1723 tree decl;
0d63a740 1724 gcov_type freq;
1725 basic_block prev;
1726
1727 /* Search for previous copied basic block. */
1728 prev = bb->prev_bb;
1729 while (!prev->aux)
1730 prev = prev->prev_bb;
1731
1732 /* create_basic_block() will append every new block to
1733 basic_block_info automatically. */
c4d281b2 1734 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
8b47039c 1735 copy_basic_block->count = apply_scale (bb->count, count_scale);
45a80bb9 1736
1737 /* We are going to rebuild frequencies from scratch. These values
1738 are of only minor importance for driving canonicalize_loop_headers. */
8b47039c 1739 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
726a989a 1740
1741 /* We recompute frequencies after inlining, so this is quite safe. */
1742 if (freq > BB_FREQ_MAX)
1743 freq = BB_FREQ_MAX;
1744 copy_basic_block->frequency = freq;
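 /* Worked example (illustrative numbers): with frequency_scale standing
 for a factor of 1/2 (i.e. REG_BR_PROB_BASE / 2), a source block
 frequency of 8000 becomes 4000 in the copy; anything scaling above
 BB_FREQ_MAX was clamped just before. */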
e21aff8a 1745
1746 copy_gsi = gsi_start_bb (copy_basic_block);
1747
1748 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e21aff8a 1749 {
d5e254e1 1750 gimple_seq stmts;
1751 gimple *stmt = gsi_stmt (gsi);
1752 gimple *orig_stmt = stmt;
1753 gimple_stmt_iterator stmts_gsi;
1754 bool stmt_added = false;
e21aff8a 1755
416c991f 1756 id->regimplify = false;
1757 stmts = remap_gimple_stmt (stmt, id);
1758
1759 if (gimple_seq_empty_p (stmts))
1760 continue;
1761
c2a4718a 1762 seq_gsi = copy_gsi;
726a989a 1763
1764 for (stmts_gsi = gsi_start (stmts);
1765 !gsi_end_p (stmts_gsi); )
e21aff8a 1766 {
1767 stmt = gsi_stmt (stmts_gsi);
1768
1769 /* Advance iterator now before stmt is moved to seq_gsi. */
1770 gsi_next (&stmts_gsi);
2b65dae5 1771
1772 if (gimple_nop_p (stmt))
1773 continue;
1774
1775 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1776 orig_stmt);
1777
1778 /* With return slot optimization we can end up with
1779 non-gimple (foo *)&this->m, fix that here. */
1780 if (is_gimple_assign (stmt)
1781 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1782 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1783 {
1784 tree new_rhs;
1785 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1786 gimple_assign_rhs1 (stmt),
1787 true, NULL, false,
1788 GSI_CONTINUE_LINKING);
1789 gimple_assign_set_rhs1 (stmt, new_rhs);
1790 id->regimplify = false;
1791 }
1792
1793 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
c2a4718a 1794
1795 if (id->regimplify)
1796 gimple_regimplify_operands (stmt, &seq_gsi);
1797
1798 stmt_added = true;
1799 }
1800
1801 if (!stmt_added)
1802 continue;
1803
1804 /* If copy_basic_block has been empty at the start of this iteration,
1805 call gsi_start_bb again to get at the newly added statements. */
1806 if (gsi_end_p (copy_gsi))
1807 copy_gsi = gsi_start_bb (copy_basic_block);
1808 else
1809 gsi_next (&copy_gsi);
110cfe1c 1810
1811 /* Process the new statement. The call to gimple_regimplify_operands
1812 possibly turned the statement into multiple statements, we
1813 need to process all of them. */
c2a4718a 1814 do
726a989a 1815 {
9187e02d 1816 tree fn;
538dd0b7 1817 gcall *call_stmt;
9187e02d 1818
c2a4718a 1819 stmt = gsi_stmt (copy_gsi);
1820 call_stmt = dyn_cast <gcall *> (stmt);
1821 if (call_stmt
1822 && gimple_call_va_arg_pack_p (call_stmt)
1823 && id->call_stmt)
1824 {
1825 /* __builtin_va_arg_pack () should be replaced by
1826 all arguments corresponding to ... in the caller. */
1827 tree p;
538dd0b7 1828 gcall *new_call;
9771b263 1829 vec<tree> argarray;
538dd0b7 1830 size_t nargs = gimple_call_num_args (id->call_stmt);
1831 size_t n, i, nargs_to_copy;
1832 bool remove_bounds = false;
726a989a 1833
910ad8de 1834 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1835 nargs--;
1836
1837 /* Bounds should be removed from the arg pack in case
1838 we handle a non-instrumented call in an instrumented
1839 function. */
1840 nargs_to_copy = nargs;
538dd0b7 1841 if (gimple_call_with_bounds_p (id->call_stmt)
1842 && !gimple_call_with_bounds_p (stmt))
1843 {
1844 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1845 i < gimple_call_num_args (id->call_stmt);
d5e254e1 1846 i++)
538dd0b7 1847 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1848 nargs_to_copy--;
1849 remove_bounds = true;
1850 }
1851
726a989a 1852 /* Create the new array of arguments. */
538dd0b7 1853 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1854 argarray.create (n);
1855 argarray.safe_grow_cleared (n);
1856
1857 /* Copy all the arguments before '...' */
9771b263 1858 memcpy (argarray.address (),
1859 gimple_call_arg_ptr (call_stmt, 0),
1860 gimple_call_num_args (call_stmt) * sizeof (tree));
726a989a 1861
1862 if (remove_bounds)
1863 {
1864 /* Append the rest of arguments removing bounds. */
1865 unsigned cur = gimple_call_num_args (call_stmt);
1866 i = gimple_call_num_args (id->call_stmt) - nargs;
1867 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1868 i < gimple_call_num_args (id->call_stmt);
d5e254e1 1869 i++)
1870 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1871 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1872 gcc_assert (cur == n);
1873 }
1874 else
1875 {
1876 /* Append the arguments passed in '...' */
1877 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1878 gimple_call_arg_ptr (id->call_stmt, 0)
1879 + (gimple_call_num_args (id->call_stmt) - nargs),
1880 nargs * sizeof (tree));
1881 }
726a989a 1882
538dd0b7 1883 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1884 argarray);
1885
9771b263 1886 argarray.release ();
1887
1888 /* Copy all GIMPLE_CALL flags, location and block, except
1889 GF_CALL_VA_ARG_PACK. */
538dd0b7 1890 gimple_call_copy_flags (new_call, call_stmt);
1891 gimple_call_set_va_arg_pack (new_call, false);
1892 gimple_set_location (new_call, gimple_location (stmt));
1893 gimple_set_block (new_call, gimple_block (stmt));
538dd0b7 1894 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1895
1896 gsi_replace (&copy_gsi, new_call, false);
1897 stmt = new_call;
1898 }
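 /* Illustrative example of the rewrite above: when inlining
 int f (int x, ...) { return g (x, __builtin_va_arg_pack ()); }
 into the call f (1, 2, 3), the copied call is rebuilt as
 g (1, 2, 3); the pack expands to the caller's anonymous
 arguments 2 and 3. */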
fca4adf2 1899 else if (call_stmt
538dd0b7 1900 && id->call_stmt
1901 && (decl = gimple_call_fndecl (stmt))
1902 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1903 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
e0704a46 1904 {
1905 /* __builtin_va_arg_pack_len () should be replaced by
1906 the number of anonymous arguments. */
538dd0b7 1907 size_t nargs = gimple_call_num_args (id->call_stmt), i;
726a989a 1908 tree count, p;
355fe088 1909 gimple *new_stmt;
726a989a 1910
910ad8de 1911 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1912 nargs--;
1913
d5e254e1 1914 /* For instrumented calls we should ignore bounds. */
1915 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1916 i < gimple_call_num_args (id->call_stmt);
d5e254e1 1917 i++)
538dd0b7 1918 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1919 nargs--;
1920
1921 count = build_int_cst (integer_type_node, nargs);
1922 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1923 gsi_replace (&copy_gsi, new_stmt, false);
1924 stmt = new_stmt;
1925 }
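 /* Continuing the example: for a call f (1, 2, 3) to a callee with one
 named parameter, __builtin_va_arg_pack_len () is replaced by the
 constant 2, the number of anonymous arguments. */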
1926 else if (call_stmt
1927 && id->call_stmt
1928 && gimple_call_internal_p (stmt)
1929 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1930 {
1931 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1932 gsi_remove (&copy_gsi, false);
1933 continue;
1934 }
b8a00a4d 1935
1936 /* Statements produced by inlining can be unfolded, especially
1937 when we have constant propagated some operands. We can't fold
1938 them right now for two reasons:
1939 1) folding requires SSA_NAME_DEF_STMTs to be correct
1940 2) we can't change function calls to builtins.
1941 So we just mark the statement for later folding. We mark
1942 all new statements, instead of just the statements that have changed
1943 by some nontrivial substitution, so that even statements made
1944 foldable indirectly are updated. If this turns out to be
1945 expensive, copy_body can be told to watch for nontrivial
1946 changes. */
1947 if (id->statements_to_fold)
6e2830c3 1948 id->statements_to_fold->add (stmt);
1949
1950 /* We're duplicating a CALL_EXPR. Find any corresponding
1951 callgraph edges and update or duplicate them. */
538dd0b7 1952 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
726a989a 1953 {
9b2a5ef7 1954 struct cgraph_edge *edge;
6ef5231b 1955
726a989a 1956 switch (id->transform_call_graph_edges)
e0704a46 1957 {
9b2a5ef7 1958 case CB_CGE_DUPLICATE:
d52f5295 1959 edge = id->src_node->get_edge (orig_stmt);
9b2a5ef7 1960 if (edge)
1961 {
1962 int edge_freq = edge->frequency;
1963 int new_freq;
1964 struct cgraph_edge *old_edge = edge;
538dd0b7 1965 edge = edge->clone (id->dst_node, call_stmt,
1966 gimple_uid (stmt),
1967 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1968 true);
1969 /* We could also just rescale the frequency, but
1970 doing so would introduce roundoff errors and make
1971 the verifier unhappy. */
67348ccc 1972 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1973 copy_basic_block);
1974
1975 /* Speculative calls consist of two edges - direct and indirect.
1976 Duplicate the whole thing and distribute frequencies accordingly. */
1977 if (edge->speculative)
0d63a740 1978 {
1979 struct cgraph_edge *direct, *indirect;
1980 struct ipa_ref *ref;
1981
1982 gcc_assert (!edge->indirect_unknown_callee);
3dafb85c 1983 old_edge->speculative_call_info (direct, indirect, ref);
538dd0b7 1984 indirect = indirect->clone (id->dst_node, call_stmt,
1985 gimple_uid (stmt),
1986 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1987 true);
1988 if (old_edge->frequency + indirect->frequency)
1989 {
1990 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1991 (old_edge->frequency + indirect->frequency)),
1992 CGRAPH_FREQ_MAX);
1993 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1994 (old_edge->frequency + indirect->frequency)),
1995 CGRAPH_FREQ_MAX);
1996 }
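 /* E.g. (illustrative numbers): if the old direct and indirect parts
 had frequencies 300 and 100 and new_freq is 200, they become 150
 and 50, preserving the 3:1 split; both are capped at
 CGRAPH_FREQ_MAX. */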
d122681a 1997 id->dst_node->clone_reference (ref, stmt);
1998 }
1999 else
2000 {
2001 edge->frequency = new_freq;
2002 if (dump_file
ea19eb9f 2003 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2004 && (edge_freq > edge->frequency + 10
2005 || edge_freq < edge->frequency - 10))
2006 {
2007 fprintf (dump_file, "Edge frequency estimated by "
2008 "cgraph %i diverges from inliner's estimate %i\n",
2009 edge_freq,
2010 edge->frequency);
2011 fprintf (dump_file,
2012 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2013 bb->index,
2014 bb->frequency,
2015 copy_basic_block->frequency);
2016 }
2017 }
2018 }
2019 break;
2020
2021 case CB_CGE_MOVE_CLONES:
d52f5295 2022 id->dst_node->set_call_stmt_including_clones (orig_stmt,
538dd0b7 2023 call_stmt);
d52f5295 2024 edge = id->dst_node->get_edge (stmt);
2025 break;
2026
2027 case CB_CGE_MOVE:
d52f5295 2028 edge = id->dst_node->get_edge (orig_stmt);
9b2a5ef7 2029 if (edge)
538dd0b7 2030 edge->set_call_stmt (call_stmt);
2031 break;
2032
2033 default:
2034 gcc_unreachable ();
110cfe1c 2035 }
f618d33e 2036
2037 /* Constant propagation on arguments done during inlining
2038 may create a new direct call. Produce an edge for it. */
b8698a0f 2039 if ((!edge
e33c6cd6 2040 || (edge->indirect_inlining_edge
9b2a5ef7 2041 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
67348ccc 2042 && id->dst_node->definition
2043 && (fn = gimple_call_fndecl (stmt)) != NULL)
2044 {
d52f5295 2045 struct cgraph_node *dest = cgraph_node::get (fn);
2046
2047 /* We have a missing edge in the callgraph. This can happen
2048 when previous inlining turned an indirect call into a
0e3776db 2049 direct call by constant propagating arguments, or when we are
20a6bb58 2050 producing a dead clone (for further cloning). In all
2051 other cases we hit a bug (incorrect node sharing is the
2052 most common reason for missing edges). */
67348ccc
DM
2053 gcc_assert (!dest->definition
2054 || dest->address_taken
2055 || !id->src_node->definition
2056 || !id->dst_node->definition);
9b2a5ef7 2057 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
d52f5295 2058 id->dst_node->create_edge_including_clones
538dd0b7 2059 (dest, orig_stmt, call_stmt, bb->count,
67348ccc 2060 compute_call_stmt_bb_frequency (id->dst_node->decl,
0d63a740 2061 copy_basic_block),
898b8927 2062 CIF_ORIGINALLY_INDIRECT_CALL);
9b2a5ef7 2063 else
538dd0b7 2064 id->dst_node->create_edge (dest, call_stmt,
2065 bb->count,
2066 compute_call_stmt_bb_frequency
67348ccc 2067 (id->dst_node->decl,
960bfb69 2068 copy_basic_block))->inline_failed
2069 = CIF_ORIGINALLY_INDIRECT_CALL;
2070 if (dump_file)
2071 {
91382288 2072 fprintf (dump_file, "Created new direct edge to %s\n",
fec39fa6 2073 dest->name ());
2074 }
2075 }
9187e02d 2076
538dd0b7 2077 notice_special_calls (as_a <gcall *> (stmt));
726a989a 2078 }
e21aff8a 2079
2080 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2081 id->eh_map, id->eh_lp_nr);
726a989a 2082
b5b8b0ac 2083 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2084 {
2085 ssa_op_iter i;
2086 tree def;
2087
2088 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2089 if (TREE_CODE (def) == SSA_NAME)
2090 SSA_NAME_DEF_STMT (def) = stmt;
2091 }
2092
2093 gsi_next (&copy_gsi);
e21aff8a 2094 }
c2a4718a 2095 while (!gsi_end_p (copy_gsi));
2096
2097 copy_gsi = gsi_last_bb (copy_basic_block);
e21aff8a 2098 }
726a989a 2099
2100 return copy_basic_block;
2101}
2102
2103/* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2104 form is quite easy, since the dominator relationship for the old basic
2105 blocks does not change.
2106
2107 There is however an exception: inlining might change the dominator relation
2108 across EH edges from basic blocks within the inlined function leading
5305a4cb 2109 to landing pads in the function we inline into.
110cfe1c 2110
2111 The function fills in PHI_RESULTs of such PHI nodes if they refer
2112 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2113 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2114 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2115 set, and this means that there will be no overlapping live ranges
2116 for the underlying symbol.
2117
2118 This might change in the future if we allow redirecting of EH edges;
2119 we might then want to change the way we build the CFG pre-inlining
2120 to include all the possible edges. */
2121static void
2122update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2123 bool can_throw, bool nonlocal_goto)
2124{
2125 edge e;
2126 edge_iterator ei;
2127
2128 FOR_EACH_EDGE (e, ei, bb->succs)
2129 if (!e->dest->aux
2130 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2131 {
2132 gphi *phi;
2133 gphi_iterator si;
110cfe1c 2134
2135 if (!nonlocal_goto)
2136 gcc_assert (e->flags & EDGE_EH);
726a989a 2137
2138 if (!can_throw)
2139 gcc_assert (!(e->flags & EDGE_EH));
2140
2141 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
110cfe1c 2142 {
2143 edge re;
2144
538dd0b7 2145 phi = si.phi ();
726a989a 2146
2147 /* For abnormal goto/call edges the receiver can be the
2148 ENTRY_BLOCK. Do not assert this cannot happen. */
e9705dc5 2149
2150 gcc_assert ((e->flags & EDGE_EH)
2151 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
e9705dc5 2152
e9705dc5 2153 re = find_edge (ret_bb, e->dest);
0107dca2 2154 gcc_checking_assert (re);
2155 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2156 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2157
2158 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2159 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2160 }
2161 }
2162}
2163
726a989a 2164
2165/* Copy edges from BB into its copy constructed earlier, scale profile
2166 accordingly. Edges will be taken care of later. Assume aux
2167 pointers to point to the copies of each BB. Return true if any
2168 debug stmts are left after a statement that must end the basic block. */
726a989a 2169
90a7788b 2170static bool
92e776e9 2171copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
09b22f48 2172 basic_block abnormal_goto_dest)
e21aff8a 2173{
cceb1885 2174 basic_block new_bb = (basic_block) bb->aux;
2175 edge_iterator ei;
2176 edge old_edge;
726a989a 2177 gimple_stmt_iterator si;
e21aff8a 2178 int flags;
90a7788b 2179 bool need_debug_cleanup = false;
2180
2181 /* Use the indices from the original blocks to create edges for the
2182 new ones. */
2183 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2184 if (!(old_edge->flags & EDGE_EH))
2185 {
82d6e6fc 2186 edge new_edge;
e21aff8a 2187
e0704a46 2188 flags = old_edge->flags;
e21aff8a 2189
e0704a46 2190 /* Return edges do get a FALLTHRU flag when they get inlined. */
2191 if (old_edge->dest->index == EXIT_BLOCK
2192 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
fefa31b5 2193 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
e0704a46 2194 flags |= EDGE_FALLTHRU;
82d6e6fc 2195 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
8b47039c 2196 new_edge->count = apply_scale (old_edge->count, count_scale);
82d6e6fc 2197 new_edge->probability = old_edge->probability;
e0704a46 2198 }
2199
2200 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
90a7788b 2201 return false;
e21aff8a 2202
726a989a 2203 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
e21aff8a 2204 {
355fe088 2205 gimple *copy_stmt;
e9705dc5 2206 bool can_throw, nonlocal_goto;
e21aff8a 2207
726a989a 2208 copy_stmt = gsi_stmt (si);
b5b8b0ac 2209 if (!is_gimple_debug (copy_stmt))
f9a21e13 2210 update_stmt (copy_stmt);
726a989a 2211
e21aff8a 2212 /* Do this before the possible split_block. */
726a989a 2213 gsi_next (&si);
2214
2215 /* If this tree could throw an exception, there are two
2216 cases where we need to add abnormal edge(s): the
2217 tree wasn't in a region and there is a "current
2218 region" in the caller; or the original tree had
2219 EH edges. In both cases split the block after the tree,
2220 and add abnormal edge(s) as needed; we need both
2221 those from the callee and the caller.
2222 We check whether the copy can throw, because the const
2223 propagation can change an INDIRECT_REF which throws
2224 into a COMPONENT_REF which doesn't. If the copy
2225 can throw, the original could also throw. */
726a989a 2226 can_throw = stmt_can_throw_internal (copy_stmt);
2227 nonlocal_goto
2228 = (stmt_can_make_abnormal_goto (copy_stmt)
2229 && !computed_goto_p (copy_stmt));
2230
2231 if (can_throw || nonlocal_goto)
e21aff8a 2232 {
2233 if (!gsi_end_p (si))
2234 {
2235 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2236 gsi_next (&si);
2237 if (gsi_end_p (si))
2238 need_debug_cleanup = true;
2239 }
726a989a 2240 if (!gsi_end_p (si))
2241 /* Note that bb's predecessor edges aren't necessarily
2242 right at this point; split_block doesn't care. */
2243 {
2244 edge e = split_block (new_bb, copy_stmt);
110cfe1c 2245
e21aff8a 2246 new_bb = e->dest;
110cfe1c 2247 new_bb->aux = e->src->aux;
726a989a 2248 si = gsi_start_bb (new_bb);
e21aff8a 2249 }
e9705dc5 2250 }
e21aff8a 2251
1d65f45c 2252 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
538dd0b7 2253 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
1d65f45c 2254 else if (can_throw)
e9705dc5 2255 make_eh_edges (copy_stmt);
110cfe1c 2256
2257 /* If the call we inline cannot make an abnormal goto, do not add
2258 additional abnormal edges but only retain those already present
2259 in the original function body. */
2260 if (abnormal_goto_dest == NULL)
2261 nonlocal_goto = false;
e9705dc5 2262 if (nonlocal_goto)
2263 {
2264 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2265
2266 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2267 nonlocal_goto = false;
2268 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2269 in OpenMP regions which aren't allowed to be left abnormally.
2270 So, no need to add abnormal edge in that case. */
2271 else if (is_gimple_call (copy_stmt)
2272 && gimple_call_internal_p (copy_stmt)
2273 && (gimple_call_internal_fn (copy_stmt)
2274 == IFN_ABNORMAL_DISPATCHER)
2275 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2276 nonlocal_goto = false;
2277 else
2278 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2279 }
2280
2281 if ((can_throw || nonlocal_goto)
2282 && gimple_in_ssa_p (cfun))
726a989a 2283 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
e9705dc5 2284 can_throw, nonlocal_goto);
110cfe1c 2285 }
90a7788b 2286 return need_debug_cleanup;
2287}
2288
2289/* Copy the PHIs. All blocks and edges are copied, some blocks
2290 were possibly split and new outgoing EH edges inserted.
2291 BB points to the block of the original function and AUX pointers link
2292 the original and newly copied blocks. */
2293
2294static void
2295copy_phis_for_bb (basic_block bb, copy_body_data *id)
2296{
3d9a9f94 2297 basic_block const new_bb = (basic_block) bb->aux;
110cfe1c 2298 edge_iterator ei;
2299 gphi *phi;
2300 gphi_iterator si;
2301 edge new_edge;
2302 bool inserted = false;
110cfe1c 2303
355a7673 2304 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
110cfe1c 2305 {
726a989a 2306 tree res, new_res;
538dd0b7 2307 gphi *new_phi;
110cfe1c 2308
538dd0b7 2309 phi = si.phi ();
2310 res = PHI_RESULT (phi);
2311 new_res = res;
ea057359 2312 if (!virtual_operand_p (res))
110cfe1c 2313 {
726a989a 2314 walk_tree (&new_res, copy_tree_body_r, id, NULL);
dcc748dd 2315 new_phi = create_phi_node (new_res, new_bb);
2316 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2317 {
2318 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2319 tree arg;
2320 tree new_arg;
8b3057b3 2321 edge_iterator ei2;
5368224f 2322 location_t locus;
8b3057b3 2323
20a6bb58 2324 /* When doing partial cloning, we allow PHIs on the entry block
2325 as long as all the arguments are the same. Find any input
2326 edge to see the argument to copy. */
2327 if (!old_edge)
2328 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2329 if (!old_edge->src->aux)
2330 break;
2331
2332 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2333 new_arg = arg;
726a989a 2334 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
110cfe1c 2335 gcc_assert (new_arg);
2336 /* With return slot optimization we can end up with
2337 non-gimple (foo *)&this->m, fix that here. */
2338 if (TREE_CODE (new_arg) != SSA_NAME
2339 && TREE_CODE (new_arg) != FUNCTION_DECL
2340 && !is_gimple_val (new_arg))
2341 {
2342 gimple_seq stmts = NULL;
2343 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2344 gsi_insert_seq_on_edge (new_edge, stmts);
2345 inserted = true;
36b6e793 2346 }
5368224f 2347 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2348 if (LOCATION_BLOCK (locus))
2349 {
2350 tree *n;
b787e7a2 2351 n = id->decl_map->get (LOCATION_BLOCK (locus));
5368224f 2352 gcc_assert (n);
ebedc9a3 2353 locus = set_block (locus, *n);
5368224f 2354 }
2355 else
2356 locus = LOCATION_LOCUS (locus);
5368224f 2357
16917761 2358 add_phi_arg (new_phi, new_arg, new_edge, locus);
110cfe1c 2359 }
2360 }
2361 }
2362
2363 /* Commit the delayed edge insertions. */
2364 if (inserted)
2365 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2366 gsi_commit_one_edge_insert (new_edge, NULL);
2367}
2368
726a989a 2369
e21aff8a 2370/* Wrapper for remap_decl so it can be used as a callback. */
726a989a 2371
2372static tree
2373remap_decl_1 (tree decl, void *data)
2374{
1b369fae 2375 return remap_decl (decl, (copy_body_data *) data);
2376}
2377
110cfe1c 2378/* Build struct function and associated datastructures for the new clone
2379 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2380 cfun to the function of new_fndecl (and current_function_decl too). */
2381
2382static void
0d63a740 2383initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
110cfe1c 2384{
110cfe1c 2385 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
0d63a740 2386 gcov_type count_scale;
110cfe1c 2387
2388 if (!DECL_ARGUMENTS (new_fndecl))
2389 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2390 if (!DECL_RESULT (new_fndecl))
2391 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2392
fefa31b5 2393 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2394 count_scale
2395 = GCOV_COMPUTE_SCALE (count,
fefa31b5 2396 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
110cfe1c 2397 else
0d63a740 2398 count_scale = REG_BR_PROB_BASE;
110cfe1c
JH
2399
2400 /* Register specific tree functions. */
726a989a 2401 gimple_register_cfg_hooks ();
2402
2403 /* Get clean struct function. */
2404 push_struct_function (new_fndecl);
2405
2406 /* We will rebuild these, so just sanity check that they are empty. */
2407 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2408 gcc_assert (cfun->local_decls == NULL);
2409 gcc_assert (cfun->cfg == NULL);
2410 gcc_assert (cfun->decl == new_fndecl);
2411
20a6bb58 2412 /* Copy items we preserve during cloning. */
2413 cfun->static_chain_decl = src_cfun->static_chain_decl;
2414 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2415 cfun->function_end_locus = src_cfun->function_end_locus;
a9e0d843 2416 cfun->curr_properties = src_cfun->curr_properties;
39ecc018 2417 cfun->last_verified = src_cfun->last_verified;
2418 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2419 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2420 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2421 cfun->stdarg = src_cfun->stdarg;
39ecc018 2422 cfun->after_inlining = src_cfun->after_inlining;
2423 cfun->can_throw_non_call_exceptions
2424 = src_cfun->can_throw_non_call_exceptions;
9510c5af 2425 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2426 cfun->returns_struct = src_cfun->returns_struct;
2427 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
39ecc018 2428
2429 init_empty_tree_cfg ();
2430
ea19eb9f 2431 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2432 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2433 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
110cfe1c 2434 REG_BR_PROB_BASE);
2435 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2436 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2437 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2438 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
110cfe1c 2439 REG_BR_PROB_BASE);
2440 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2441 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2442 if (src_cfun->eh)
2443 init_eh_for_function ();
2444
2445 if (src_cfun->gimple_df)
2446 {
5db9ba0c 2447 init_tree_ssa (cfun);
110cfe1c 2448 cfun->gimple_df->in_ssa_p = true;
3828719a 2449 init_ssa_operands (cfun);
110cfe1c 2450 }
2451}
2452
2453/* Helper function for copy_cfg_body. Move debug stmts from the end
2454 of NEW_BB to the beginning of successor basic blocks when needed. If a
2455 successor has multiple predecessors, reset the debug stmts' values,
2456 otherwise keep them. */
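 /* E.g. (illustrative): if NEW_BB ends with a throwing call followed by
 debug binds, the binds are moved after the call into each successor;
 on a successor with several predecessors the bound value is reset,
 because it need not hold on the other incoming paths. */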
2457
2458static void
2459maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2460{
2461 edge e;
2462 edge_iterator ei;
2463 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2464
2465 if (gsi_end_p (si)
2466 || gsi_one_before_end_p (si)
2467 || !(stmt_can_throw_internal (gsi_stmt (si))
2468 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2469 return;
2470
2471 FOR_EACH_EDGE (e, ei, new_bb->succs)
2472 {
2473 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2474 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2475 while (is_gimple_debug (gsi_stmt (ssi)))
2476 {
355fe088 2477 gimple *stmt = gsi_stmt (ssi);
538dd0b7 2478 gdebug *new_stmt;
2479 tree var;
2480 tree value;
2481
2482 /* For the last edge move the debug stmts instead of copying
2483 them. */
2484 if (ei_one_before_end_p (ei))
2485 {
2486 si = ssi;
2487 gsi_prev (&ssi);
ddb555ed 2488 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2489 gimple_debug_bind_reset_value (stmt);
2490 gsi_remove (&si, false);
2491 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2492 continue;
2493 }
2494
ddb555ed 2495 if (gimple_debug_bind_p (stmt))
90a7788b 2496 {
2497 var = gimple_debug_bind_get_var (stmt);
2498 if (single_pred_p (e->dest))
2499 {
2500 value = gimple_debug_bind_get_value (stmt);
2501 value = unshare_expr (value);
2502 }
2503 else
2504 value = NULL_TREE;
2505 new_stmt = gimple_build_debug_bind (var, value, stmt);
2506 }
2507 else if (gimple_debug_source_bind_p (stmt))
2508 {
2509 var = gimple_debug_source_bind_get_var (stmt);
2510 value = gimple_debug_source_bind_get_value (stmt);
2511 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2512 }
2513 else
ddb555ed 2514 gcc_unreachable ();
90a7788b 2515 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
9771b263 2516 id->debug_stmts.safe_push (new_stmt);
2517 gsi_prev (&ssi);
2518 }
2519 }
2520}
2521
2522/* Make a copy of the sub-loops of SRC_PARENT and place them
2523 as siblings of DEST_PARENT. */
2524
2525static void
f3b331d1 2526copy_loops (copy_body_data *id,
2527 struct loop *dest_parent, struct loop *src_parent)
2528{
2529 struct loop *src_loop = src_parent->inner;
2530 while (src_loop)
2531 {
2532 if (!id->blocks_to_copy
2533 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2534 {
2535 struct loop *dest_loop = alloc_loop ();
2536
2537 /* Assign the new loop its header and latch and associate
2538 those with the new loop. */
2539 dest_loop->header = (basic_block)src_loop->header->aux;
2540 dest_loop->header->loop_father = dest_loop;
2541 if (src_loop->latch != NULL)
2542 {
2543 dest_loop->latch = (basic_block)src_loop->latch->aux;
2544 dest_loop->latch->loop_father = dest_loop;
2545 }
2546
2547 /* Copy loop meta-data. */
2548 copy_loop_info (src_loop, dest_loop);
2549
2550 /* Finally place it into the loop array and the loop tree. */
0fc822d0 2551 place_new_loop (cfun, dest_loop);
2552 flow_loop_tree_node_add (dest_parent, dest_loop);
2553
2554 dest_loop->safelen = src_loop->safelen;
2555 dest_loop->dont_vectorize = src_loop->dont_vectorize;
b15b5979 2556 if (src_loop->force_vectorize)
f3b331d1 2557 {
2558 dest_loop->force_vectorize = true;
2559 cfun->has_force_vectorize_loops = true;
f3b331d1 2560 }
2561 if (src_loop->simduid)
2562 {
2563 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2564 cfun->has_simduid_loops = true;
2565 }
f3b331d1 2566
a9e0d843 2567 /* Recurse. */
f3b331d1 2568 copy_loops (id, dest_loop, src_loop);
2569 }
2570 src_loop = src_loop->next;
2571 }
2572}
2573
2574/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2575
2576void
2577redirect_all_calls (copy_body_data * id, basic_block bb)
2578{
2579 gimple_stmt_iterator si;
355fe088 2580 gimple *last = last_stmt (bb);
2581 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2582 {
355fe088 2583 gimple *stmt = gsi_stmt (si);
15aed8c4 2584 if (is_gimple_call (stmt))
042ae7d2 2585 {
15aed8c4 2586 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
042ae7d2 2587 if (edge)
2588 {
2589 edge->redirect_call_stmt_to_callee ();
2590 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2591 gimple_purge_dead_eh_edges (bb);
2592 }
2593 }
2594 }
2595}
2596
2597/* Convert estimated frequencies into counts for NODE, scaling COUNT
2598 with each bb's frequency. Used when NODE has a 0-weight entry
2599 but we are about to inline it into a non-zero count call bb.
2600 See the comments for handle_missing_profiles() in predict.c for
2601 when this can happen for COMDATs. */
2602
2603void
2604freqs_to_counts (struct cgraph_node *node, gcov_type count)
2605{
2606 basic_block bb;
2607 edge_iterator ei;
2608 edge e;
2609 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2610
2611 FOR_ALL_BB_FN (bb, fn)
2612 {
2613 bb->count = apply_scale (count,
2614 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2615 FOR_EACH_EDGE (e, ei, bb->succs)
2616 e->count = apply_probability (e->src->count, e->probability);
2617 }
2618}
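 /* E.g. (illustrative numbers): with COUNT 1000, a block whose
 frequency is half of BB_FREQ_MAX receives count 500, and each
 outgoing edge then gets the source block's count scaled by the
 edge probability. */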
2619
2620/* Make a copy of the body of FN so that it can be inserted inline in
2621 another function. Walks FN via CFG, returns new fndecl. */
2622
2623static tree
0d63a740 2624copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
91382288 2625 basic_block entry_block_map, basic_block exit_block_map,
f3b331d1 2626 basic_block new_entry)
e21aff8a 2627{
1b369fae 2628 tree callee_fndecl = id->src_fn;
e21aff8a 2629 /* Original cfun for the callee, doesn't change. */
1b369fae 2630 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
110cfe1c 2631 struct function *cfun_to_copy;
2632 basic_block bb;
2633 tree new_fndecl = NULL;
90a7788b 2634 bool need_debug_cleanup = false;
0d63a740 2635 gcov_type count_scale;
110cfe1c 2636 int last;
2637 int incoming_frequency = 0;
2638 gcov_type incoming_count = 0;
e21aff8a 2639
2640 /* This can happen for COMDAT routines that end up with 0 counts
2641 despite being called (see the comments for handle_missing_profiles()
2642 in predict.c as to why). Apply counts to the blocks in the callee
2643 before inlining, using the guessed edge frequencies, so that we don't
2644 end up with a 0-count inline body which can confuse downstream
2645 optimizations such as function splitting. */
fefa31b5 2646 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2647 {
2648 /* Apply the larger of the call bb count and the total incoming
2649 call edge count to the callee. */
2650 gcov_type in_count = 0;
2651 struct cgraph_edge *in_edge;
2652 for (in_edge = id->src_node->callers; in_edge;
2653 in_edge = in_edge->next_caller)
2654 in_count += in_edge->count;
2655 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2656 }
2657
fefa31b5 2658 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2659 count_scale
2660 = GCOV_COMPUTE_SCALE (count,
fefa31b5 2661 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
e21aff8a 2662 else
0d63a740 2663 count_scale = REG_BR_PROB_BASE;
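 /* Illustrative numbers: if the callee's entry count is 1000 and the
 call site's count is 250, count_scale represents a factor of 1/4
 (as a fraction of REG_BR_PROB_BASE), so every copied block receives
 a quarter of its original count. */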
2664
2665 /* Register specific tree functions. */
726a989a 2666 gimple_register_cfg_hooks ();
e21aff8a 2667
2668 /* If we are inlining just a region of the function, make sure to connect
2669 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2670 be part of a loop, we must compute the frequency and probability of
2671 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
20a6bb58 2672 probabilities of edges incoming from the nonduplicated region. */
2673 if (new_entry)
2674 {
2675 edge e;
2676 edge_iterator ei;
2677
2678 FOR_EACH_EDGE (e, ei, new_entry->preds)
2679 if (!e->src->aux)
2680 {
2681 incoming_frequency += EDGE_FREQUENCY (e);
2682 incoming_count += e->count;
b35366ce 2683 }
8b47039c 2684 incoming_count = apply_scale (incoming_count, count_scale);
20a6bb58 2685 incoming_frequency
8b47039c 2686 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2687 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2688 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2689 }
2690
e21aff8a 2691 /* Must have a CFG here at this point. */
fefa31b5 2692 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2693 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2694
2695 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2696
2697 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2698 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2699 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2700 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
e21aff8a 2701
2702 /* Duplicate any exception-handling regions. */
2703 if (cfun->eh)
2704 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2705 remap_decl_1, id);
726a989a 2706
2707 /* Use aux pointers to map the original blocks to copy. */
2708 FOR_EACH_BB_FN (bb, cfun_to_copy)
f3b331d1 2709 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2710 {
2711 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2712 bb->aux = new_bb;
2713 new_bb->aux = bb;
a9e0d843 2714 new_bb->loop_father = entry_block_map->loop_father;
91382288 2715 }
110cfe1c 2716
8b1c6fd7 2717 last = last_basic_block_for_fn (cfun);
726a989a 2718
e21aff8a 2719 /* Now that we've duplicated the blocks, duplicate their edges. */
09b22f48 2720 basic_block abnormal_goto_dest = NULL;
2721 if (id->call_stmt
2722 && stmt_can_make_abnormal_goto (id->call_stmt))
09b22f48 2723 {
538dd0b7 2724 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
09b22f48 2725
538dd0b7 2726 bb = gimple_bb (id->call_stmt);
2727 gsi_next (&gsi);
2728 if (gsi_end_p (gsi))
2729 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2730 }
e21aff8a 2731 FOR_ALL_BB_FN (bb, cfun_to_copy)
2732 if (!id->blocks_to_copy
2733 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
92e776e9 2734 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
09b22f48 2735 abnormal_goto_dest);
726a989a 2736
91382288 2737 if (new_entry)
110cfe1c 2738 {
b35366ce 2739 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
91382288 2740 e->probability = REG_BR_PROB_BASE;
20a6bb58 2741 e->count = incoming_count;
110cfe1c 2742 }
726a989a 2743
a9e0d843 2744 /* Duplicate the loop tree, if available and wanted. */
0fc822d0 2745 if (loops_for_fn (src_cfun) != NULL
2746 && current_loops != NULL)
2747 {
f3b331d1 2748 copy_loops (id, entry_block_map->loop_father,
0fc822d0 2749 get_loop (src_cfun, 0));
2750 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2751 loops_state_set (LOOPS_NEED_FIXUP);
2752 }
2753
2754 /* If the loop tree in the source function needed fixup, mark the
2755 destination loop tree for fixup, too. */
2756 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2757 loops_state_set (LOOPS_NEED_FIXUP);
2758
2759 if (gimple_in_ssa_p (cfun))
2760 FOR_ALL_BB_FN (bb, cfun_to_copy)
2761 if (!id->blocks_to_copy
2762 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2763 copy_phis_for_bb (bb, id);
2764
2765 FOR_ALL_BB_FN (bb, cfun_to_copy)
2766 if (bb->aux)
2767 {
2768 if (need_debug_cleanup
2769 && bb->index != ENTRY_BLOCK
2770 && bb->index != EXIT_BLOCK)
2771 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2772 /* Update call edge destinations. This cannot be done before loop
2773 info is updated, because we may split basic blocks. */
2774 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2775 && bb->index != ENTRY_BLOCK
2776 && bb->index != EXIT_BLOCK)
042ae7d2 2777 redirect_all_calls (id, (basic_block)bb->aux);
2778 ((basic_block)bb->aux)->aux = NULL;
2779 bb->aux = NULL;
2780 }
2781
2782 /* Zero out AUX fields of newly created block during EH edge
2783 insertion. */
8b1c6fd7 2784 for (; last < last_basic_block_for_fn (cfun); last++)
2785 {
2786 if (need_debug_cleanup)
2787 maybe_move_debug_stmts_to_successors (id,
2788 BASIC_BLOCK_FOR_FN (cfun, last));
2789 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2790 /* Update call edge destinations. This cannot be done before loop
2791 info is updated, because we may split basic blocks. */
2792 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
06e28de2 2793 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
90a7788b 2794 }
2795 entry_block_map->aux = NULL;
2796 exit_block_map->aux = NULL;
e21aff8a 2797
2798 if (id->eh_map)
2799 {
b787e7a2 2800 delete id->eh_map;
2801 id->eh_map = NULL;
2802 }
2803 if (id->dependence_map)
2804 {
2805 delete id->dependence_map;
2806 id->dependence_map = NULL;
2807 }
1d65f45c 2808
2809 return new_fndecl;
2810}
2811
2812/* Copy the debug STMT using ID. We deal with these statements in a
2813 special way: if any variable in their VALUE expression wasn't
2814 remapped yet, we won't remap it, because that would get decl uids
2815 out of sync, causing codegen differences between -g and -g0. If
2816 this arises, we drop the VALUE expression altogether. */
2817
2818static void
538dd0b7 2819copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2820{
2821 tree t, *n;
2822 struct walk_stmt_info wi;
2823
2824 if (gimple_block (stmt))
2825 {
b787e7a2 2826 n = id->decl_map->get (gimple_block (stmt));
16917761 2827 gimple_set_block (stmt, n ? *n : id->block);
b5b8b0ac 2828 }
2829
2830 /* Remap all the operands in COPY. */
2831 memset (&wi, 0, sizeof (wi));
2832 wi.info = id;
2833
2834 processing_debug_stmt = 1;
2835
2836 if (gimple_debug_source_bind_p (stmt))
2837 t = gimple_debug_source_bind_get_var (stmt);
2838 else
2839 t = gimple_debug_bind_get_var (stmt);
2840
2841 if (TREE_CODE (t) == PARM_DECL && id->debug_map
b787e7a2 2842 && (n = id->debug_map->get (t)))
2843 {
2844 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2845 t = *n;
2846 }
d17af147 2847 else if (TREE_CODE (t) == VAR_DECL
5f564b8f 2848 && !is_global_var (t)
b787e7a2 2849 && !id->decl_map->get (t))
d17af147 2850 /* T is a non-localized variable. */;
2851 else
2852 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2853
2854 if (gimple_debug_bind_p (stmt))
2855 {
2856 gimple_debug_bind_set_var (stmt, t);
b5b8b0ac 2857
2858 if (gimple_debug_bind_has_value_p (stmt))
2859 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2860 remap_gimple_op_r, &wi, NULL);
b5b8b0ac 2861
2862 /* Punt if any decl couldn't be remapped. */
2863 if (processing_debug_stmt < 0)
2864 gimple_debug_bind_reset_value (stmt);
2865 }
2866 else if (gimple_debug_source_bind_p (stmt))
2867 {
2868 gimple_debug_source_bind_set_var (stmt, t);
2869 /* When inlining, if the source bind refers to one of the optimized-away
2870 parameters, change the source bind into a normal debug bind
2871 referring to the corresponding DEBUG_EXPR_DECL that should have
2872 been bound before the call stmt. */
2873 t = gimple_debug_source_bind_get_value (stmt);
2874 if (t != NULL_TREE
2875 && TREE_CODE (t) == PARM_DECL
538dd0b7 2876 && id->call_stmt)
878eef4a 2877 {
9771b263 2878 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2879 unsigned int i;
2880 if (debug_args != NULL)
2881 {
2882 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2883 if ((**debug_args)[i] == DECL_ORIGIN (t)
2884 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
878eef4a 2885 {
9771b263 2886 t = (**debug_args)[i + 1];
daa6e488 2887 stmt->subcode = GIMPLE_DEBUG_BIND;
2888 gimple_debug_bind_set_value (stmt, t);
2889 break;
2890 }
2891 }
2892 }
2893 if (gimple_debug_source_bind_p (stmt))
2894 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2895 remap_gimple_op_r, &wi, NULL);
ddb555ed 2896 }
2897
2898 processing_debug_stmt = 0;
2899
2900 update_stmt (stmt);
2901}
2902
2903/* Process deferred debug stmts. In order to give values better odds
2904 of being successfully remapped, we delay the processing of debug
2905 stmts until all other stmts that might require remapping are
2906 processed. */
2907
2908static void
2909copy_debug_stmts (copy_body_data *id)
2910{
2911 size_t i;
538dd0b7 2912 gdebug *stmt;
b5b8b0ac 2913
9771b263 2914 if (!id->debug_stmts.exists ())
2915 return;
2916
9771b263 2917 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2918 copy_debug_stmt (stmt, id);
2919
9771b263 2920 id->debug_stmts.release ();
2921}
2922
2923/* Make a copy of the body of SRC_FN so that it can be inserted inline in
2924 another function. */
2925
2926static tree
2927copy_tree_body (copy_body_data *id)
2928{
2929 tree fndecl = id->src_fn;
2930 tree body = DECL_SAVED_TREE (fndecl);
2931
2932 walk_tree (&body, copy_tree_body_r, id, NULL);
2933
2934 return body;
2935}
2936
2937/* Make a copy of the body of FN so that it can be inserted inline in
2938 another function. */
2939
e21aff8a 2940static tree
0d63a740 2941copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
91382288 2942 basic_block entry_block_map, basic_block exit_block_map,
f3b331d1 2943 basic_block new_entry)
e21aff8a 2944{
1b369fae 2945 tree fndecl = id->src_fn;
2946 tree body;
2947
2948 /* If this body has a CFG, walk CFG and copy. */
fefa31b5 2949 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
91382288 2950 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
f3b331d1 2951 new_entry);
b5b8b0ac 2952 copy_debug_stmts (id);
2953
2954 return body;
2955}
2956
2957/* Return true if VALUE is an ADDR_EXPR of an automatic variable
2958 defined in function FN, or of a data member thereof. */
2959
2960static bool
2961self_inlining_addr_expr (tree value, tree fn)
2962{
2963 tree var;
2964
2965 if (TREE_CODE (value) != ADDR_EXPR)
2966 return false;
2967
2968 var = get_base_address (TREE_OPERAND (value, 0));
e21aff8a 2969
50886bf1 2970 return var && auto_var_in_fn_p (var, fn);
2971}
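 /* Example of the hazard this checks for (illustrative): when inlining
 a recursive call to f into f itself, an argument such as &local,
 where local is an auto variable of f, must not be propagated into
 the copied body, which needs its own instance of local rather than
 the caller frame's. */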
2972
2973/* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2974 lexical block and line number information from base_stmt, if given,
2975 or from the last stmt of the block otherwise. */
2976
355fe088 2977static gimple *
2978insert_init_debug_bind (copy_body_data *id,
2979 basic_block bb, tree var, tree value,
355fe088 2980 gimple *base_stmt)
b5b8b0ac 2981{
355fe088 2982 gimple *note;
2983 gimple_stmt_iterator gsi;
2984 tree tracked_var;
2985
2986 if (!gimple_in_ssa_p (id->src_cfun))
2987 return NULL;
2988
683750ce 2989 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
b5b8b0ac
AO
2990 return NULL;
2991
2992 tracked_var = target_for_debug_bind (var);
2993 if (!tracked_var)
2994 return NULL;
2995
2996 if (bb)
2997 {
2998 gsi = gsi_last_bb (bb);
2999 if (!base_stmt && !gsi_end_p (gsi))
3000 base_stmt = gsi_stmt (gsi);
3001 }
3002
2bdb7f56 3003 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3004
3005 if (bb)
3006 {
3007 if (!gsi_end_p (gsi))
3008 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3009 else
3010 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3011 }
3012
3013 return note;
3014}
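 /* Sketch (assumed dump syntax): for an inlined parameter p whose
 variable was initialized from argument a_2, this emits a note like
 "# DEBUG p => a_2" so that var-tracking can still give p a value
 at -g even when the assignment itself was optimized away. */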
3015
6de9cd9a 3016static void
355fe088 3017insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
0f1961a2 3018{
3019 /* If VAR represents a zero-sized variable, it's possible that the
3020 assignment statement may result in no gimple statements. */
3021 if (init_stmt)
3022 {
3023 gimple_stmt_iterator si = gsi_last_bb (bb);
0f1961a2 3024
3025 /* We can end up with init statements that store to a non-register
3026 from a rhs with a conversion. Handle that here by forcing the
3027 rhs into a temporary. gimple_regimplify_operands is not
3028 prepared to do this for us. */
3029 if (!is_gimple_debug (init_stmt)
3030 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3031 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3032 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3033 {
3034 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3035 gimple_expr_type (init_stmt),
3036 gimple_assign_rhs1 (init_stmt));
3037 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3038 GSI_NEW_STMT);
3039 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3040 gimple_assign_set_rhs1 (init_stmt, rhs);
3041 }
3042 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3043 gimple_regimplify_operands (init_stmt, &si);
b5b8b0ac 3044
683750ce 3045 if (!is_gimple_debug (init_stmt))
b5b8b0ac 3046 {
3047 tree def = gimple_assign_lhs (init_stmt);
3048 insert_init_debug_bind (id, bb, def, def, init_stmt);
b5b8b0ac 3049 }
c2a4718a 3050 }
3051}
3052
3053/* Initialize parameter P with VALUE. If needed, produce init statement
3054 at the end of BB. When BB is NULL, we return init statement to be
3055 output later. */
355fe088 3056static gimple *
1b369fae 3057setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
e21aff8a 3058 basic_block bb, tree *vars)
6de9cd9a 3059{
355fe088 3060 gimple *init_stmt = NULL;
6de9cd9a 3061 tree var;
f4088621 3062 tree rhs = value;
110cfe1c 3063 tree def = (gimple_in_ssa_p (cfun)
32244553 3064 ? ssa_default_def (id->src_cfun, p) : NULL);
6de9cd9a 3065
3066 if (value
3067 && value != error_mark_node
3068 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
c54e3854 3069 {
c4ac6e94 3070 /* If we can match up types by promotion/demotion do so. */
c54e3854 3071 if (fold_convertible_p (TREE_TYPE (p), value))
c4ac6e94 3072 rhs = fold_convert (TREE_TYPE (p), value);
c54e3854 3073 else
3074 {
3075 /* ??? For valid programs we should not end up here.
3076 Still if we end up with truly mismatched types here, fall back
3077 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3078 GIMPLE to the following passes. */
3079 if (!is_gimple_reg_type (TREE_TYPE (value))
3080 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3081 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3082 else
3083 rhs = build_zero_cst (TREE_TYPE (p));
3084 }
c54e3854 3085 }
f4088621 3086
b5b8b0ac
AO
3087 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3088 here since the type of this decl must be visible to the calling
3089 function. */
3090 var = copy_decl_to_var (p, id);
3091
b5b8b0ac 3092 /* Declare this new variable. */
910ad8de 3093 DECL_CHAIN (var) = *vars;
b5b8b0ac
AO
3094 *vars = var;
3095
3096 /* Make gimplifier happy about this variable. */
3097 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3098
110cfe1c 3099 /* If the parameter is never assigned to, has no SSA_NAMEs created,
b5b8b0ac
AO
3100 we would not need to create a new variable here at all, if it
3101 weren't for debug info. Still, we can just use the argument
3102 value. */
6de9cd9a
DN
3103 if (TREE_READONLY (p)
3104 && !TREE_ADDRESSABLE (p)
110cfe1c
JH
3105 && value && !TREE_SIDE_EFFECTS (value)
3106 && !def)
6de9cd9a 3107 {
84936f6f
RH
3108 /* We may produce non-gimple trees by adding NOPs or introduce
3109 invalid sharing when operand is not really constant.
3110 It is not big deal to prohibit constant propagation here as
3111 we will constant propagate in DOM1 pass anyway. */
3112 if (is_gimple_min_invariant (value)
f4088621
RG
3113 && useless_type_conversion_p (TREE_TYPE (p),
3114 TREE_TYPE (value))
04482133
AO
3115 /* We have to be very careful about ADDR_EXPR. Make sure
3116 the base variable isn't a local variable of the inlined
3117 function, e.g., when doing recursive inlining, direct or
3118 mutually-recursive or whatever, which is why we don't
3119 just test whether fn == current_function_decl. */
3120 && ! self_inlining_addr_expr (value, fn))
6de9cd9a 3121 {
6de9cd9a 3122 insert_decl_map (id, p, value);
b5b8b0ac
AO
3123 insert_debug_decl_map (id, p, var);
3124 return insert_init_debug_bind (id, bb, var, value, NULL);
6de9cd9a
DN
3125 }
3126 }
3127
6de9cd9a
DN
3128 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3129 that way, when the PARM_DECL is encountered, it will be
3130 automatically replaced by the VAR_DECL. */
7c7d3047 3131 insert_decl_map (id, p, var);
6de9cd9a 3132
6de9cd9a
DN
3133 /* Even if P was TREE_READONLY, the new VAR should not be.
3134 In the original code, we would have constructed a
3135 temporary, and then the function body would have never
3136 changed the value of P. However, now, we will be
3137 constructing VAR directly. The constructor body may
3138 change its value multiple times as it is being
3139 constructed. Therefore, it must not be TREE_READONLY;
3140 the back-end assumes that TREE_READONLY variable is
3141 assigned to only once. */
3142 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3143 TREE_READONLY (var) = 0;
3144
110cfe1c
JH
3145 /* If there is no setup required and we are in SSA, take the easy route
3146 replacing all SSA names representing the function parameter by the
3147 SSA name passed to function.
3148
3149 We need to construct map for the variable anyway as it might be used
3150 in different SSA names when parameter is set in function.
3151
8454d27e
JH
3152 Do replacement at -O0 for const arguments replaced by constant.
3153 This is important for builtin_constant_p and other construct requiring
b5b8b0ac 3154 constant argument to be visible in inlined function body. */
110cfe1c 3155 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
8454d27e
JH
3156 && (optimize
3157 || (TREE_READONLY (p)
3158 && is_gimple_min_invariant (rhs)))
110cfe1c 3159 && (TREE_CODE (rhs) == SSA_NAME
9b718f81
JH
3160 || is_gimple_min_invariant (rhs))
3161 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
110cfe1c
JH
3162 {
3163 insert_decl_map (id, def, rhs);
b5b8b0ac 3164 return insert_init_debug_bind (id, bb, var, rhs, NULL);
110cfe1c
JH
3165 }
3166
f6f2da7d
JH
3167 /* If the value of argument is never used, don't care about initializing
3168 it. */
1cf5abb3 3169 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
f6f2da7d
JH
3170 {
3171 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
b5b8b0ac 3172 return insert_init_debug_bind (id, bb, var, rhs, NULL);
f6f2da7d
JH
3173 }
3174
6de9cd9a
DN
3175 /* Initialize this VAR_DECL from the equivalent argument. Convert
3176 the argument to the proper type in case it was promoted. */
3177 if (value)
3178 {
6de9cd9a 3179 if (rhs == error_mark_node)
110cfe1c 3180 {
7c7d3047 3181 insert_decl_map (id, p, var);
b5b8b0ac 3182 return insert_init_debug_bind (id, bb, var, rhs, NULL);
110cfe1c 3183 }
afe08db5 3184
73dab33b 3185 STRIP_USELESS_TYPE_CONVERSION (rhs);
6de9cd9a 3186
6b18b1a3 3187 /* If we are in SSA form properly remap the default definition
27eb31c9
RG
3188 or assign to a dummy SSA name if the parameter is unused and
3189 we are not optimizing. */
6b18b1a3 3190 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
110cfe1c 3191 {
6b18b1a3
RG
3192 if (def)
3193 {
3194 def = remap_ssa_name (def, id);
3195 init_stmt = gimple_build_assign (def, rhs);
3196 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
32244553 3197 set_ssa_default_def (cfun, var, NULL);
6b18b1a3 3198 }
27eb31c9
RG
3199 else if (!optimize)
3200 {
b731b390 3201 def = make_ssa_name (var);
27eb31c9
RG
3202 init_stmt = gimple_build_assign (def, rhs);
3203 }
110cfe1c
JH
3204 }
3205 else
726a989a 3206 init_stmt = gimple_build_assign (var, rhs);
6de9cd9a 3207
0f1961a2 3208 if (bb && init_stmt)
b5b8b0ac 3209 insert_init_stmt (id, bb, init_stmt);
6de9cd9a 3210 }
0f1961a2 3211 return init_stmt;
6de9cd9a
DN
3212}
3213
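/* Illustrative sketch (not part of the original source): inlining
   `int sq (int p) { return p * p; }' at `y = sq (a_1);' conceptually
   yields, when an init statement is needed at all (with optimization
   the SSA short-cuts above may map the parameter straight to `a_1'):

     int p.7;        <- the VAR_DECL made by copy_decl_to_var
     p.7 = a_1;      <- the init_stmt built by gimple_build_assign

   `p.7' is a hypothetical name; the real one is whatever
   copy_decl_to_var produces.  */
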
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the GIMPLE_CALL STMT.  */

static void
initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
                               tree fn, basic_block bb)
{
  tree parms;
  size_t i;
  tree p;
  tree vars = NULL_TREE;
  tree static_chain = gimple_call_chain (stmt);

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree val;
      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
      setup_one_parameter (id, p, val, fn, bb, &vars);
    }
  /* After remapping parameters remap their types.  This has to be done
     in a second loop over all parameters to appropriately remap
     variable sized arrays when the size is specified in a
     parameter following the array.  */
  for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
    {
      tree *varp = id->decl_map->get (p);
      if (varp
          && TREE_CODE (*varp) == VAR_DECL)
        {
          tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
                      ? ssa_default_def (id->src_cfun, p) : NULL);
          tree var = *varp;
          TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
          /* Also remap the default definition if it was remapped
             to the default definition of the parameter replacement
             by the parameter setup.  */
          if (def)
            {
              tree *defp = id->decl_map->get (def);
              if (defp
                  && TREE_CODE (*defp) == SSA_NAME
                  && SSA_NAME_VAR (*defp) == var)
                TREE_TYPE (*defp) = TREE_TYPE (var);
            }
        }
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  gcc_assert (fn != current_function_decl);
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}


/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.  An appropriate DECL_STMT is returned.
   The USE_STMT is filled to contain a use of the declaration to
   indicate the return value of the function.

   RETURN_SLOT, if non-null, is the place where to store the result.  It
   is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
   was the LHS of the MODIFY_EXPR to which this call is the RHS.

   RETURN_BOUNDS holds a destination for returned bounds.

   The return value is a (possibly null) value that holds the result
   as seen by the caller.  */

static tree
declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
                         tree return_bounds, basic_block entry_bb)
{
  tree callee = id->src_fn;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type;
  tree var, use;

  /* Handle type-mismatches in the function declaration return type
     vs. the call expression.  */
  if (modify_dest)
    caller_type = TREE_TYPE (modify_dest);
  else
    caller_type = TREE_TYPE (TREE_TYPE (callee));

  /* We don't need to do anything for functions that don't return anything.  */
  if (VOID_TYPE_P (callee_type))
    return NULL_TREE;

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot)
    {
      /* The front end shouldn't have used both return_slot and
         a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
        {
          tree return_slot_addr = build_fold_addr_expr (return_slot);
          STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);

          /* We are going to construct *&return_slot and we can't do that
             for variables believed to be not addressable.

             FIXME: This check possibly can match, because values returned
             via return slot optimization are not believed to have address
             taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          var = return_slot_addr;
        }
      else
        {
          var = return_slot;
          gcc_assert (TREE_CODE (var) != SSA_NAME);
          if (TREE_ADDRESSABLE (result))
            mark_addressable (var);
        }
      if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
           || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest
      && TREE_CODE (modify_dest) != SSA_NAME)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!useless_type_conversion_p (callee_type, caller_type))
        use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
        use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
         reuse it as the result of the call directly.  Don't do this if
         it would promote MODIFY_DEST to addressable.  */
      else if (TREE_ADDRESSABLE (result))
        use_it = false;
      else
        {
          tree base_m = get_base_address (modify_dest);

          /* If the base isn't a decl, then it's a pointer, and we don't
             know where that's going to go.  */
          if (!DECL_P (base_m))
            use_it = false;
          else if (is_global_var (base_m))
            use_it = false;
          else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
                   && !DECL_GIMPLE_REG_P (result)
                   && DECL_GIMPLE_REG_P (base_m))
            use_it = false;
          else if (!TREE_ADDRESSABLE (base_m))
            use_it = true;
        }

      if (use_it)
        {
          var = modify_dest;
          use = NULL;
          goto done;
        }
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_result_decl_to_var (result, id);
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  declare_inline_vars (id->block, var);

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
    {
      /* If we can match up types by promotion/demotion do so.  */
      if (fold_convertible_p (caller_type, var))
        use = fold_convert (caller_type, var);
      else
        {
          /* ???  For valid programs we should not end up here.
             Still if we end up with truly mismatched types here, fall back
             to using a MEM_REF to not leak invalid GIMPLE to the following
             passes.  */
          /* Prevent var from being written into SSA form.  */
          if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
              || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
            DECL_GIMPLE_REG_P (var) = false;
          else if (is_gimple_reg_type (TREE_TYPE (var)))
            TREE_ADDRESSABLE (var) = true;
          use = fold_build2 (MEM_REF, caller_type,
                             build_fold_addr_expr (var),
                             build_int_cst (ptr_type_node, 0));
        }
    }

  STRIP_USELESS_TYPE_CONVERSION (use);

  if (DECL_BY_REFERENCE (result))
    {
      TREE_ADDRESSABLE (var) = 1;
      var = build_fold_addr_expr (var);
    }

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.

     When returning by reference, ensure that RESULT_DECL remaps to
     gimple_val.  */
  if (DECL_BY_REFERENCE (result)
      && !is_gimple_val (var))
    {
      tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
      insert_decl_map (id, result, temp);
      /* When RESULT_DECL is in SSA form, we need to remap and initialize
         its default_def SSA_NAME.  */
      if (gimple_in_ssa_p (id->src_cfun)
          && is_gimple_reg (result))
        {
          temp = make_ssa_name (temp);
          insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
        }
      insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
    }
  else
    insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  /* If returned bounds are used, then make var for them.  */
  if (return_bounds)
    {
      tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
      DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
      TREE_NO_WARNING (bndtemp) = 1;
      declare_inline_vars (id->block, bndtemp);

      id->retbnd = bndtemp;
      insert_init_stmt (id, entry_bb,
                        gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
    }

  return use;
}

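/* Illustrative sketch (an assumption, not original source): for
   `s = build_str ();' with the return slot optimization in effect,
   RETURN_SLOT is `s' and the callee's RESULT_DECL remaps to it, so the
   inlined body writes into `s' directly.  Without a usable MODIFY_DEST
   the code above instead creates a temporary, say `retval.9', and the
   caller later sees `s = retval.9;'.  `build_str' and `retval.9' are
   made-up names for the example.  */
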
/* Callback through walk_tree.  Determine if a DECL_INITIAL makes reference
   to a local label.  */

static tree
has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;

  if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
    return node;

  if (TYPE_P (node))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Determine if the function can be copied.  If so return NULL.  If
   not return a string describing the reason for failure.  */

const char *
copy_forbidden (struct function *fun, tree fndecl)
{
  const char *reason = fun->cannot_be_copied_reason;
  tree decl;
  unsigned ix;

  /* Only examine the function once.  */
  if (fun->cannot_be_copied_set)
    return reason;

  /* We cannot copy a function that receives a non-local goto
     because we cannot remap the destination label used in the
     function that is performing the non-local goto.  */
  /* ??? Actually, this should be possible, if we work at it.
     No doubt there's just a handful of places that simply
     assume it doesn't happen and don't substitute properly.  */
  if (fun->has_nonlocal_label)
    {
      reason = G_("function %q+F can never be copied "
                  "because it receives a non-local goto");
      goto fail;
    }

  FOR_EACH_LOCAL_DECL (fun, ix, decl)
    if (TREE_CODE (decl) == VAR_DECL
        && TREE_STATIC (decl)
        && !DECL_EXTERNAL (decl)
        && DECL_INITIAL (decl)
        && walk_tree_without_duplicates (&DECL_INITIAL (decl),
                                         has_label_address_in_static_1,
                                         fndecl))
      {
        reason = G_("function %q+F can never be copied because it saves "
                    "address of local label in a static variable");
        goto fail;
      }

 fail:
  fun->cannot_be_copied_reason = reason;
  fun->cannot_be_copied_set = true;
  return reason;
}

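/* Illustrative example (not from the original source): copy_forbidden
   rejects e.g.

     void f (void)
     {
      lab:;
       static void *p = &&lab;
       ...
     }

   because the static initializer captures the address of the local
   label `lab', which could not be remapped in a copy of `f'.  */
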
static const char *inline_forbidden_reason;

/* A callback for walk_gimple_seq to handle statements.  Returns non-null
   iff a function cannot be inlined.  Also sets the reason why.  */

static tree
inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                         struct walk_stmt_info *wip)
{
  tree fn = (tree) wip->info;
  tree t;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Refuse to inline an alloca call unless the user explicitly forced
         it, as this may change the program's memory overhead drastically
         when the function using alloca is called in a loop.  In GCC as
         present in SPEC2000, inlining into schedule_block caused it to
         require 2GB of RAM instead of 256MB.  Don't do so for alloca
         calls emitted for VLA objects, as those can't cause unbounded
         growth (they're always wrapped inside stack_save/stack_restore
         regions).  */
      if (gimple_alloca_call_p (stmt)
          && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
          *handled_ops_p = true;
          return fn;
        }

      t = gimple_call_fndecl (stmt);
      if (t == NULL_TREE)
        break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
          *handled_ops_p = true;
          return t;
        }

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (t))
          {
            /* We cannot inline functions that take a variable number of
               arguments.  */
          case BUILT_IN_VA_START:
          case BUILT_IN_NEXT_ARG:
          case BUILT_IN_VA_END:
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
               all.  The non-local goto machinery really requires the
               destination be in a different function.  If we allow the
               function calling __builtin_longjmp to be inlined into the
               function calling __builtin_setjmp, Things will Go Awry.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
            *handled_ops_p = true;
            return t;

          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
            /* If a __builtin_apply_args caller would be inlined,
               it would be saving arguments of the function it has
               been inlined into.  Similarly __builtin_return would
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
            *handled_ops_p = true;
            return t;

          default:
            break;
          }
      break;

    case GIMPLE_GOTO:
      t = gimple_goto_dest (stmt);

      /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
         global storage, are of course not constant across
         instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
          *handled_ops_p = true;
          return t;
        }
      break;

    default:
      break;
    }

  *handled_ops_p = false;
  return NULL_TREE;
}

/* Return true if FNDECL is a function that cannot be inlined into
   another one.  */

static bool
inline_forbidden_p (tree fndecl)
{
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
  struct walk_stmt_info wi;
  basic_block bb;
  bool forbidden_p = false;

  /* First check for shared reasons not to copy the code.  */
  inline_forbidden_reason = copy_forbidden (fun, fndecl);
  if (inline_forbidden_reason != NULL)
    return true;

  /* Next, walk the statements of the function looking for
     constructs we can't handle, or are non-optimal for inlining.  */
  hash_set<tree> visited_nodes;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) fndecl;
  wi.pset = &visited_nodes;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *ret;
      gimple_seq seq = bb_seq (bb);
      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
      forbidden_p = (ret != NULL);
      if (forbidden_p)
        break;
    }

  return forbidden_p;
}
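
/* Illustrative example (not from the original source): a function such as

     int g (int n) { char *p = alloca (n); return p[0]; }

   is refused by inline_forbidden_p_stmt above (unless marked
   always_inline), for the memory-growth reasons documented there;
   likewise anything calling setjmp or using a computed goto.  */
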

/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
static bool
function_attribute_inlinable_p (const_tree fndecl)
{
  if (targetm.attribute_table)
    {
      const_tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
          const_tree name = TREE_PURPOSE (a);
          int i;

          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
            if (is_attribute_p (targetm.attribute_table[i].name, name))
              return targetm.function_attribute_inlinable_p (fndecl);
        }
    }

  return true;
}

/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

bool
tree_inlinable_function_p (tree fn)
{
  bool inlinable = true;
  bool do_warning;
  tree always_inline;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* We only warn for functions declared `inline' by the user.  */
  do_warning = (warn_inline
                && DECL_DECLARED_INLINE_P (fn)
                && !DECL_NO_INLINE_WARNING_P (fn)
                && !DECL_IN_SYSTEM_HEADER (fn));

  always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));

  if (flag_no_inline
      && always_inline == NULL)
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "is suppressed using -fno-inline", fn);
      inlinable = false;
    }

  else if (!function_attribute_inlinable_p (fn))
    {
      if (do_warning)
        warning (OPT_Winline, "function %q+F can never be inlined because it "
                 "uses attributes conflicting with inlining", fn);
      inlinable = false;
    }

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
         some of these warnings would be issued while trying to expand
         the function inline, but that would cause multiple warnings
         about functions that would for example call alloca.  But since
         this is a property of the function, just one warning is enough.
         As a bonus we can now give more details about the reason why a
         function is not inlinable.  */
      if (always_inline)
        error (inline_forbidden_reason, fn);
      else if (do_warning)
        warning (OPT_Winline, inline_forbidden_reason, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}

/* Estimate the cost of a memory move of type TYPE.  Use machine dependent
   word size and take possible memcpy call into account and return
   cost based on whether optimizing for size or speed according to SPEED_P.  */

int
estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
{
  HOST_WIDE_INT size;

  gcc_assert (!VOID_TYPE_P (type));

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      machine_mode inner = TYPE_MODE (TREE_TYPE (type));
      machine_mode simd
        = targetm.vectorize.preferred_simd_mode (inner);
      int simd_mode_size = GET_MODE_SIZE (simd);
      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
              / simd_mode_size);
    }

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
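
/* Worked example (assuming a hypothetical target with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4): a 24-byte
   struct costs (24 + 8 - 1) / 8 = 3 units, while a 100-byte struct
   exceeds 8 * 4 = 32 bytes and is charged the flat memcpy cost of 4.
   Variable-sized types (size < 0) also get the memcpy cost.  */
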

/* Returns cost of operation CODE, according to WEIGHTS.  */

static int
estimate_operator_cost (enum tree_code code, eni_weights *weights,
                        tree op1 ATTRIBUTE_UNUSED, tree op2)
{
  switch (code)
    {
    /* These are "free" conversions, or their presumed cost
       is folded into other operations.  */
    case RANGE_EXPR:
    CASE_CONVERT:
    case COMPLEX_EXPR:
    case PAREN_EXPR:
    case VIEW_CONVERT_EXPR:
      return 0;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case FMA_EXPR:

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case FIX_TRUNC_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case REALIGN_LOAD_EXPR:

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case WIDEN_LSHIFT_EXPR:

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:

      return 1;

    /* A few special cases of expensive operations.  This is useful
       to avoid inlining functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      if (TREE_CODE (op2) != INTEGER_CST)
        return weights->div_mod_cost;
      return 1;

    default:
      /* We expect a copy assignment with no operator.  */
      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
      return 0;
    }
}
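
/* Illustrative consequence (not original source): `a = b / c' with a
   non-constant `c' is charged weights->div_mod_cost, whereas
   `a = b / 8' typically expands to shifts or multiplies and is
   charged 1 like any other simple operator, since op2 is an
   INTEGER_CST.  */
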

/* Estimate number of instructions that will be created by expanding
   the statements in the statement sequence STMTS.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
{
  int cost;
  gimple_stmt_iterator gsi;

  cost = 0;
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    cost += estimate_num_insns (gsi_stmt (gsi), weights);

  return cost;
}

/* Estimate number of instructions that will be created by expanding STMT.
   WEIGHTS contains weights attributed to various constructs.  */

int
estimate_num_insns (gimple *stmt, eni_weights *weights)
{
  unsigned cost, i;
  enum gimple_code code = gimple_code (stmt);
  tree lhs;
  tree rhs;

  switch (code)
    {
    case GIMPLE_ASSIGN:
      /* Try to estimate the cost of assignments.  We have two cases to
         deal with:
         1) Simple assignments to registers;
         2) Stores to things that must live in memory.  This includes
            "normal" stores to scalars, but also assignments of large
            structures, or constructors of big arrays;

         Let us look at both cases, assuming we have "a = b + C":
         <GIMPLE_ASSIGN <var_decl "a">
                        <plus_expr <var_decl "b"> <constant C>>
         If "a" is a GIMPLE register, the assignment to it is free on almost
         any target, because "a" usually ends up in a real register.  Hence
         the only cost of this expression comes from the PLUS_EXPR, and we
         can ignore the GIMPLE_ASSIGN.
         If "a" is not a GIMPLE register, the assignment to "a" will most
         likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
         of moving something into "a", which we compute using the function
         estimate_move_cost.  */
      if (gimple_clobber_p (stmt))
        return 0;	/* ={v} {CLOBBER} stmt expands to nothing.  */

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);

      cost = 0;

      /* Account for the cost of moving to / from memory.  */
      if (gimple_store_p (stmt))
        cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
      if (gimple_assign_load_p (stmt))
        cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);

      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
                                      gimple_assign_rhs1 (stmt),
                                      get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                                      == GIMPLE_BINARY_RHS
                                      ? gimple_assign_rhs2 (stmt) : NULL);
      break;

    case GIMPLE_COND:
      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
                                         gimple_op (stmt, 0),
                                         gimple_op (stmt, 1));
      break;

    case GIMPLE_SWITCH:
      {
        gswitch *switch_stmt = as_a <gswitch *> (stmt);
        /* Take into account the cost of the switch + guess 2 conditional
           jumps for each case label.

           TODO: once the switch expansion logic is sufficiently separated,
           we can do a better job of estimating the cost of the switch.  */
        if (weights->time_based)
          cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
        else
          cost = gimple_switch_num_labels (switch_stmt) * 2;
      }
      break;

    case GIMPLE_CALL:
      {
        tree decl;

        if (gimple_call_internal_p (stmt))
          return 0;
        else if ((decl = gimple_call_fndecl (stmt))
                 && DECL_BUILT_IN (decl))
          {
            /* Do not special case builtins where we see the body.
               This just confuses the inliner.  */
            struct cgraph_node *node;
            if (!(node = cgraph_node::get (decl))
                || node->definition)
              ;
            /* For builtins that are likely expanded to nothing or
               inlined do not account operand costs.  */
            else if (is_simple_builtin (decl))
              return 0;
            else if (is_inexpensive_builtin (decl))
              return weights->target_builtin_call_cost;
            else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
              {
                /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
                   specialize the cheap expansion we do here.
                   ??? This asks for a more general solution.  */
                switch (DECL_FUNCTION_CODE (decl))
                  {
                  case BUILT_IN_POW:
                  case BUILT_IN_POWF:
                  case BUILT_IN_POWL:
                    if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
                        && (real_equal
                            (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
                             &dconst2)))
                      return estimate_operator_cost
                          (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
                           gimple_call_arg (stmt, 0));
                    break;

                  default:
                    break;
                  }
              }
          }

        cost = decl ? weights->call_cost : weights->indirect_call_cost;
        if (gimple_call_lhs (stmt))
          cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
                                      weights->time_based);
        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            cost += estimate_move_cost (TREE_TYPE (arg),
                                        weights->time_based);
          }
        break;
      }

    case GIMPLE_RETURN:
      return weights->return_cost;

    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PHI:
    case GIMPLE_PREDICT:
    case GIMPLE_DEBUG:
      return 0;

    case GIMPLE_ASM:
      {
        int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
        /* 1000 means infinity.  This avoids overflows later
           with very long asm statements.  */
        if (count > 1000)
          count = 1000;
        return count;
      }

    case GIMPLE_RESX:
      /* This is either going to be an external function call with one
         argument, or two register copy statements plus a goto.  */
      return 2;

    case GIMPLE_EH_DISPATCH:
      /* ??? This is going to turn into a switch statement.  Ideally
         we'd have a look at the eh region and estimate the number of
         edges involved.  */
      return 10;

    case GIMPLE_BIND:
      return estimate_num_insns_seq (
               gimple_bind_body (as_a <gbind *> (stmt)),
               weights);

    case GIMPLE_EH_FILTER:
      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);

    case GIMPLE_CATCH:
      return estimate_num_insns_seq (gimple_catch_handler (
                                       as_a <gcatch *> (stmt)),
                                     weights);

    case GIMPLE_TRY:
      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));

    /* OMP directives are generally very expensive.  */

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* ...except these, which are cheap.  */
      return 0;

    case GIMPLE_OMP_ATOMIC_LOAD:
      return weights->omp_cost;

    case GIMPLE_OMP_FOR:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      return (weights->omp_cost
              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));

    case GIMPLE_TRANSACTION:
      return (weights->tm_cost
              + estimate_num_insns_seq (gimple_transaction_body (
                                          as_a <gtransaction *> (stmt)),
                                        weights));

    default:
      gcc_unreachable ();
    }

  return cost;
}
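
/* Illustrative tally (an assumption, not original source): for the pair
   of GIMPLE statements `tmp_1 = *p_2' and `x_3 = tmp_1 + y_4', the
   first is a load costing estimate_move_cost for the loaded type --
   1 for an int on a typical target -- plus 0 for the register copy,
   and the second is a register add costing 1 from
   estimate_operator_cost, for a total of 2.  */
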

/* Estimate number of instructions that will be created by expanding
   function FNDECL.  WEIGHTS contains weights attributed to various
   constructs.  */

int
estimate_num_insns_fn (tree fndecl, eni_weights *weights)
{
  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
  gimple_stmt_iterator bsi;
  basic_block bb;
  int n = 0;

  gcc_assert (my_function && my_function->cfg);
  FOR_EACH_BB_FN (bb, my_function)
    {
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        n += estimate_num_insns (gsi_stmt (bsi), weights);
    }

  return n;
}

/* Initializes weights used by estimate_num_insns.  */

void
init_inline_once (void)
{
  eni_size_weights.call_cost = 1;
  eni_size_weights.indirect_call_cost = 3;
  eni_size_weights.target_builtin_call_cost = 1;
  eni_size_weights.div_mod_cost = 1;
  eni_size_weights.omp_cost = 40;
  eni_size_weights.tm_cost = 10;
  eni_size_weights.time_based = false;
  eni_size_weights.return_cost = 1;

  /* Estimating time for call is difficult, since we have no idea what the
     called function does.  In the current uses of eni_time_weights,
     underestimating the cost does less harm than overestimating it, so
     we choose a rather small value here.  */
  eni_time_weights.call_cost = 10;
  eni_time_weights.indirect_call_cost = 15;
  eni_time_weights.target_builtin_call_cost = 1;
  eni_time_weights.div_mod_cost = 10;
  eni_time_weights.omp_cost = 40;
  eni_time_weights.tm_cost = 40;
  eni_time_weights.time_based = true;
  eni_time_weights.return_cost = 2;
}
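
/* Note on the numbers above (not original source): in the time-based
   weights a call costs 10 while a simple operator costs 1, whereas in
   the size-based weights a call costs the same as a single move, so
   the time metric penalizes leaving calls un-inlined much more heavily
   than the size metric does.  */
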

/* Install new lexical TREE_BLOCK underneath 'current_block'.  */

static void
prepend_lexical_block (tree current_block, tree new_block)
{
  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
  BLOCK_SUBBLOCKS (current_block) = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
}

/* Add local variables from CALLEE to CALLER.  */

static inline void
add_local_variables (struct function *callee, struct function *caller,
                     copy_body_data *id)
{
  tree var;
  unsigned ix;

  FOR_EACH_LOCAL_DECL (callee, ix, var)
    if (!can_be_nonlocal (var, id))
      {
        tree new_var = remap_decl (var, id);

        /* Remap debug-expressions.  */
        if (TREE_CODE (new_var) == VAR_DECL
            && DECL_HAS_DEBUG_EXPR_P (var)
            && new_var != var)
          {
            tree tem = DECL_DEBUG_EXPR (var);
            bool old_regimplify = id->regimplify;
            id->remapping_type_depth++;
            walk_tree (&tem, copy_tree_body_r, id, NULL);
            id->remapping_type_depth--;
            id->regimplify = old_regimplify;
            SET_DECL_DEBUG_EXPR (new_var, tem);
            DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
          }
        add_local_decl (caller, new_var);
      }
}

/* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
   have brought in or introduced any debug stmts for SRCVAR.  */

static inline void
reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
{
  tree *remappedvarp = id->decl_map->get (srcvar);

  if (!remappedvarp)
    return;

  if (TREE_CODE (*remappedvarp) != VAR_DECL)
    return;

  if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
    return;

  tree tvar = target_for_debug_bind (*remappedvarp);
  if (!tvar)
    return;

  gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
                                          id->call_stmt);
  gimple_seq_add_stmt (bindings, stmt);
}

/* For each inlined variable for which we may have debug bind stmts,
   add before GSI a final debug stmt resetting it, marking the end of
   its life, so that var-tracking knows it doesn't have to compute
   further locations for it.  */

static inline void
reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
{
  tree var;
  unsigned ix;
  gimple_seq bindings = NULL;

  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  for (var = DECL_ARGUMENTS (id->src_fn);
       var; var = DECL_CHAIN (var))
    reset_debug_binding (id, var, &bindings);

  FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
    reset_debug_binding (id, var, &bindings);

  gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
}
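
/* Illustrative effect (not original source): if the callee had a local
   `i' with debug binds, var-tracking would otherwise keep extending
   `i's location range past the inlined body; a `# DEBUG i => NULL'
   bind emitted by the code above ends its live range at the call
   site.  */
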
4358
726a989a 4359/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
d4e4baa9 4360
e21aff8a 4361static bool
355fe088 4362expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
d4e4baa9 4363{
0f900dfa 4364 tree use_retvar;
d436bff8 4365 tree fn;
b787e7a2
TS
4366 hash_map<tree, tree> *dst;
4367 hash_map<tree, tree> *st = NULL;
110cfe1c 4368 tree return_slot;
7740f00d 4369 tree modify_dest;
d5e254e1 4370 tree return_bounds = NULL;
e21aff8a 4371 struct cgraph_edge *cg_edge;
61a05df1 4372 cgraph_inline_failed_t reason;
e21aff8a
SB
4373 basic_block return_block;
4374 edge e;
726a989a 4375 gimple_stmt_iterator gsi, stmt_gsi;
9915b1c9 4376 bool successfully_inlined = false;
4f6c2131 4377 bool purge_dead_abnormal_edges;
538dd0b7 4378 gcall *call_stmt;
d5e254e1 4379 unsigned int i;
d4e4baa9 4380
9915b1c9
MLI
4381 /* The gimplifier uses input_location in too many places, such as
4382 internal_get_tmp_var (). */
4383 location_t saved_location = input_location;
035775c8 4384 input_location = gimple_location (stmt);
6de9cd9a 4385
d4e4baa9 4386 /* From here on, we're only interested in CALL_EXPRs. */
538dd0b7
DM
4387 call_stmt = dyn_cast <gcall *> (stmt);
4388 if (!call_stmt)
6de9cd9a 4389 goto egress;
d4e4baa9 4390
d52f5295 4391 cg_edge = id->dst_node->get_edge (stmt);
db09f943 4392 gcc_checking_assert (cg_edge);
d4e4baa9
AO
4393 /* First, see if we can figure out what function is being called.
4394 If we cannot, then there is no hope of inlining the function. */
db09f943 4395 if (cg_edge->indirect_unknown_callee)
3949c4a7 4396 goto egress;
67348ccc 4397 fn = cg_edge->callee->decl;
db09f943 4398 gcc_checking_assert (fn);
b58b1157 4399
726a989a 4400 /* If FN is a declaration of a function in a nested scope that was
a1a0fd4e
AO
4401 globally declared inline, we don't set its DECL_INITIAL.
4402 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4403 C++ front-end uses it for cdtors to refer to their internal
4404 declarations, that are not real functions. Fortunately those
4405 don't have trees to be saved, so we can tell by checking their
726a989a
RB
4406 gimple_body. */
4407 if (!DECL_INITIAL (fn)
a1a0fd4e 4408 && DECL_ABSTRACT_ORIGIN (fn)
39ecc018 4409 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
a1a0fd4e
AO
4410 fn = DECL_ABSTRACT_ORIGIN (fn);
4411
8f4f502f 4412 /* Don't try to inline functions that are not well-suited to inlining. */
9c8305f8 4413 if (cg_edge->inline_failed)
a833faa5 4414 {
9c8305f8 4415 reason = cg_edge->inline_failed;
3e293154
MJ
4416 /* If this call was originally indirect, we do not want to emit any
4417 inlining related warnings or sorry messages because there are no
4418 guarantees regarding those. */
e33c6cd6 4419 if (cg_edge->indirect_inlining_edge)
3e293154
MJ
4420 goto egress;
4421
7fac66d4 4422 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
bfc61b40
JH
4423 /* For extern inline functions that get redefined we always
4424 silently ignored always_inline flag. Better behaviour would
4425 be to be able to keep both bodies and use extern inline body
4426 for inlining, but we can't do that because frontends overwrite
4427 the body. */
4428 && !cg_edge->callee->local.redefined_extern_inline
df9dda2d
ST
4429 /* During early inline pass, report only when optimization is
4430 not turned on. */
3dafb85c 4431 && (symtab->global_info_ready
1cf11770
L
4432 || !optimize
4433 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
c9fc06dc
CB
4434 /* PR 20090218-1_0.c. Body can be provided by another module. */
4435 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
2d327012 4436 {
c9fc06dc
CB
4437 error ("inlining failed in call to always_inline %q+F: %s", fn,
4438 cgraph_inline_failed_string (reason));
9915b1c9
MLI
4439 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4440 inform (gimple_location (stmt), "called from here");
4441 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4442 inform (DECL_SOURCE_LOCATION (cfun->decl),
4443 "called from this function");
2d327012 4444 }
ff7037dc
EB
4445 else if (warn_inline
4446 && DECL_DECLARED_INLINE_P (fn)
4447 && !DECL_NO_INLINE_WARNING_P (fn)
2d327012 4448 && !DECL_IN_SYSTEM_HEADER (fn)
61a05df1 4449 && reason != CIF_UNSPECIFIED
d63db217 4450 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
d7d1d041 4451 /* Do not warn about not inlined recursive calls. */
3dafb85c 4452 && !cg_edge->recursive_p ()
d63db217 4453 /* Avoid warnings during early inline pass. */
3dafb85c 4454 && symtab->global_info_ready)
a833faa5 4455 {
9915b1c9
MLI
4456 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4457 fn, _(cgraph_inline_failed_string (reason))))
4458 {
4459 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4460 inform (gimple_location (stmt), "called from here");
4461 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4462 inform (DECL_SOURCE_LOCATION (cfun->decl),
4463 "called from this function");
4464 }
a833faa5 4465 }
6de9cd9a 4466 goto egress;
a833faa5 4467 }
67348ccc 4468 fn = cg_edge->callee->decl;
70486010 4469 cg_edge->callee->get_untransformed_body ();
d4e4baa9 4470
b2b29377 4471 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
d52f5295 4472 cg_edge->callee->verify ();
18c6ada9 4473
e21aff8a 4474 /* We will be inlining this callee. */
1d65f45c 4475 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
d5e254e1 4476 id->assign_stmts.create (0);
e21aff8a 4477
f9417da1 4478 /* Update the callers EH personality. */
67348ccc
DM
4479 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4480 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4481 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
f9417da1 4482
726a989a 4483 /* Split the block holding the GIMPLE_CALL. */
e21aff8a
SB
4484 e = split_block (bb, stmt);
4485 bb = e->src;
4486 return_block = e->dest;
4487 remove_edge (e);
4488
4f6c2131
EB
4489 /* split_block splits after the statement; work around this by
4490 moving the call into the second block manually. Not pretty,
4491 but seems easier than doing the CFG manipulation by hand
726a989a
RB
4492 when the GIMPLE_CALL is in the last statement of BB. */
4493 stmt_gsi = gsi_last_bb (bb);
4494 gsi_remove (&stmt_gsi, false);
4f6c2131 4495
726a989a 4496 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4f6c2131
EB
4497 been the source of abnormal edges. In this case, schedule
4498 the removal of dead abnormal edges. */
726a989a
RB
4499 gsi = gsi_start_bb (return_block);
4500 if (gsi_end_p (gsi))
e21aff8a 4501 {
726a989a 4502 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4f6c2131 4503 purge_dead_abnormal_edges = true;
e21aff8a 4504 }
4f6c2131
EB
4505 else
4506 {
726a989a 4507 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4f6c2131
EB
4508 purge_dead_abnormal_edges = false;
4509 }
4510
726a989a 4511 stmt_gsi = gsi_start_bb (return_block);
742a37d5 4512
d436bff8
AH
4513 /* Build a block containing code to initialize the arguments, the
4514 actual inline expansion of the body, and a label for the return
4515 statements within the function to jump to. The type of the
3e492e9c
RB
4516 statement expression is the return type of the function call.
4517 ??? If the call does not have an associated block then we will
4518 remap all callee blocks to NULL, effectively dropping most of
4519 its debug information. This should only happen for calls to
4520 artificial decls inserted by the compiler itself. We need to
4521 either link the inlined blocks into the caller block tree or
4522 not refer to them in any way to not break GC for locations. */
5368224f 4523 if (gimple_block (stmt))
3e492e9c
RB
4524 {
4525 id->block = make_node (BLOCK);
4526 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
9915b1c9
MLI
4527 BLOCK_SOURCE_LOCATION (id->block)
4528 = LOCATION_LOCUS (gimple_location (stmt));
3e492e9c
RB
4529 prepend_lexical_block (gimple_block (stmt), id->block);
4530 }
e21aff8a 4531
d4e4baa9
AO
4532 /* Local declarations will be replaced by their equivalents in this
4533 map. */
4534 st = id->decl_map;
b787e7a2 4535 id->decl_map = new hash_map<tree, tree>;
b5b8b0ac
AO
4536 dst = id->debug_map;
4537 id->debug_map = NULL;
d4e4baa9 4538
e21aff8a 4539 /* Record the function we are about to inline. */
1b369fae
RH
4540 id->src_fn = fn;
4541 id->src_node = cg_edge->callee;
110cfe1c 4542 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
538dd0b7 4543 id->call_stmt = stmt;
1b369fae 4544
026c3cfd 4545 /* If the src function contains an IFN_VA_ARG, then so will the dst
d67cb100
TV
4546 function after inlining. */
4547 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4548 {
4549 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4550 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4551 }
4552
3c8da8a5
AO
4553 gcc_assert (!id->src_cfun->after_inlining);
4554
045685a9 4555 id->entry_bb = bb;
7299cb99
JH
4556 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4557 {
4558 gimple_stmt_iterator si = gsi_last_bb (bb);
4559 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4560 NOT_TAKEN),
4561 GSI_NEW_STMT);
4562 }
726a989a 4563 initialize_inlined_parameters (id, stmt, fn, bb);
d4e4baa9 4564
ea99e0be 4565 if (DECL_INITIAL (fn))
94645a02 4566 {
3e492e9c
RB
4567 if (gimple_block (stmt))
4568 {
4569 tree *var;
4570
4571 prepend_lexical_block (id->block,
4572 remap_blocks (DECL_INITIAL (fn), id));
4573 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4574 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4575 == NULL_TREE));
4576 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4577 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4578 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4579 under it. The parameters can be then evaluated in the debugger,
4580 but don't show in backtraces. */
4581 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4582 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4583 {
4584 tree v = *var;
4585 *var = TREE_CHAIN (v);
4586 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4587 BLOCK_VARS (id->block) = v;
4588 }
4589 else
4590 var = &TREE_CHAIN (*var);
4591 }
4592 else
4593 remap_blocks_to_null (DECL_INITIAL (fn), id);
94645a02 4594 }

  /* Return statements in the function body will be replaced by jumps
     to the RET_LABEL.  */
  gcc_assert (DECL_INITIAL (fn));
  gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);

  /* Find the LHS to which the result of this call is assigned.  */
  return_slot = NULL;
  if (gimple_call_lhs (stmt))
    {
      modify_dest = gimple_call_lhs (stmt);

      /* Remember where to copy returned bounds.  */
      if (gimple_call_with_bounds_p (stmt)
          && TREE_CODE (modify_dest) == SSA_NAME)
        {
          gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
          if (retbnd)
            {
              return_bounds = gimple_call_lhs (retbnd);
              /* If the returned bounds are not used, just remove the
                 unused call.  */
              if (!return_bounds)
                {
                  gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
                  gsi_remove (&iter, true);
                }
            }
        }

      /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
         see that the variable to which the value is assigned was not
         initialized.  We do not want to issue a warning about that
         uninitialized variable.  */
      if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;

      if (gimple_call_return_slot_opt_p (call_stmt))
        {
          return_slot = modify_dest;
          modify_dest = NULL;
        }
    }
  else
    modify_dest = NULL;

  /* If we are inlining a call to the C++ operator new, we don't want
     to use type based alias analysis on the return value.  Otherwise
     we may get confused if the compiler sees that the inlined new
     function returns a pointer which was just deleted.  See bug
     33407.  */
  if (DECL_IS_OPERATOR_NEW (fn))
    {
      return_slot = NULL;
      modify_dest = NULL;
    }
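  /* Illustrative sketch of the PR 33407 situation (not from the original
     source): in a sequence such as

         delete p;
         q = new T;

     the inlined body of operator new may legitimately return the storage
     just freed, so letting type-based alias analysis treat the returned
     pointer as pointing to "fresh" memory could wrongly disambiguate *p
     and *q.  Dropping RETURN_SLOT and MODIFY_DEST here avoids that.  */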

  /* Declare the return variable for the function.  */
  use_retvar = declare_return_variable (id, return_slot, modify_dest,
                                        return_bounds, bb);

  /* Add local vars in this inlined callee to caller.  */
  add_local_variables (id->src_cfun, cfun, id);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inlining ");
      print_generic_expr (dump_file, id->src_fn, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, id->dst_fn, 0);
      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
    }

  /* This is it.  Duplicate the callee body.  Assume callee is
     pre-gimplified.  Note that we must not alter the caller
     function in any way before this point, as this CALL_EXPR may be
     a self-referential call; if we're calling ourselves, we need to
     duplicate our body before altering anything.  */
  copy_body (id, cg_edge->callee->count,
             GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
             bb, return_block, NULL);
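  /* A hedged note (not from the original source): the self-reference
     caveat above matters for recursive inlining.  When expanding, e.g.,

         int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

     into one of its own call sites, the body being copied is the caller's
     own body, so mutating the caller before copy_body completes would
     corrupt the source of the copy.  */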

  reset_debug_bindings (id, stmt_gsi);

  /* Reset the escaped solution.  */
  if (cfun->gimple_df)
    pt_solution_reset (&cfun->gimple_df->escaped);

  /* Clean up.  */
  if (id->debug_map)
    {
      delete id->debug_map;
      id->debug_map = dst;
    }
  delete id->decl_map;
  id->decl_map = st;

  /* Unlink the call's virtual operands before replacing the statement.  */
  unlink_stmt_vdef (stmt);
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    release_ssa_name (gimple_vdef (stmt));

  /* If the inlined function returns a result that we care about,
     substitute the GIMPLE_CALL with an assignment of the return
     variable to the LHS of the call.  That is, if STMT was
     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
  if (use_retvar && gimple_call_lhs (stmt))
    {
      gimple *old_stmt = stmt;
      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
      gsi_replace (&stmt_gsi, stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);

      /* Copy bounds if we copy structure with bounds.  */
      if (chkp_function_instrumented_p (id->dst_fn)
          && !BOUNDED_P (use_retvar)
          && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
        id->assign_stmts.safe_push (stmt);
    }
  else
    {
      /* Handle the case of inlining a function with no return
         statement, which causes the return value to become undefined.  */
      if (gimple_call_lhs (stmt)
          && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
        {
          tree name = gimple_call_lhs (stmt);
          tree var = SSA_NAME_VAR (name);
          tree def = ssa_default_def (cfun, var);

          if (def)
            {
              /* If the variable is used uninitialized elsewhere, reuse
                 its default definition and make this name undefined via
                 a move.  */
              stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
              gsi_replace (&stmt_gsi, stmt, true);
            }
          else
            {
              /* Otherwise make this variable undefined.  */
              gsi_remove (&stmt_gsi, true);
              set_ssa_default_def (cfun, var, name);
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
      else
        gsi_remove (&stmt_gsi, true);
    }
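  /* Illustrative example (not from the original source): inlining a
     callee that falls off the end without returning, e.g.

         int broken (void) { }
         ...
         x_1 = broken ();

     leaves x_1 without a defining statement; the code above rewires it
     to an SSA default definition, the same representation used for a
     read of an uninitialized variable.  */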

  /* Put returned bounds into the correct place if required.  */
  if (return_bounds)
    {
      gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
      gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
      gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
      unlink_stmt_vdef (old_stmt);
      gsi_replace (&bnd_gsi, new_stmt, false);
      maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
      cgraph_update_edges_for_call_stmt (old_stmt,
                                         gimple_call_fndecl (old_stmt),
                                         new_stmt);
    }

  if (purge_dead_abnormal_edges)
    {
      gimple_purge_dead_eh_edges (return_block);
      gimple_purge_dead_abnormal_call_edges (return_block);
    }

  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  if (is_gimple_assign (stmt))
    {
      gcc_assert (gimple_assign_single_p (stmt)
                  || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
    }

  /* Copy bounds for all generated assigns that need it.  */
  for (i = 0; i < id->assign_stmts.length (); i++)
    chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
  id->assign_stmts.release ();

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  if (gimple_block (stmt))
    (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cg_edge->callee->remove ();

  id->block = NULL_TREE;
  successfully_inlined = true;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}

/* Expand call statements reachable in basic block BB.  We can only have
   CALL_EXPRs as the "toplevel" tree code or nested in a MODIFY_EXPR.  */

static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
{
  gimple_stmt_iterator gsi;
  bool inlined = false;

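  /* A hedged note (not from the original source): the walk below runs
     from the last statement of BB towards the first.  Inlining a call
     splits BB at the call, moving the trailing statements to a new
     block, so walking backwards keeps the iterator valid and never
     revisits statements introduced by an expansion just performed.  */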
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      gsi_prev (&gsi);

      if (is_gimple_call (stmt)
          && !gimple_call_internal_p (stmt))
        inlined |= expand_call_inline (bb, stmt, id);
    }

  return inlined;
}


/* Walk all basic blocks created after FIRST and try to fold every
   statement in the STATEMENTS pointer set.  */

static void
fold_marked_statements (int first, hash_set<gimple *> *statements)
{
  for (; first < n_basic_blocks_for_fn (cfun); first++)
    if (BASIC_BLOCK_FOR_FN (cfun, first))
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
             !gsi_end_p (gsi);
             gsi_next (&gsi))
          if (statements->contains (gsi_stmt (gsi)))
            {
              gimple *old_stmt = gsi_stmt (gsi);
              tree old_decl
                = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;

              if (old_decl && DECL_BUILT_IN (old_decl))
                {
                  /* Folding builtins can create multiple instructions;
                     we need to look at all of them.  */
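                  /* Illustrative note (not from the original source):
                     fold_stmt on a builtin call such as

                         memcpy (&dst, &src, sizeof dst);

                     can replace the call with one or more ordinary
                     statements (e.g. an aggregate assignment), so every
                     statement between I2 and GSI must get its call-graph
                     edges and EH information updated below.  */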
                  gimple_stmt_iterator i2 = gsi;
                  gsi_prev (&i2);
                  if (fold_stmt (&gsi))
                    {
                      gimple *new_stmt;
                      /* If a builtin at the end of a bb folded into nothing,
                         the following loop won't work.  */
                      if (gsi_end_p (gsi))
                        {
                          cgraph_update_edges_for_call_stmt (old_stmt,
                                                             old_decl, NULL);
                          break;
                        }
                      if (gsi_end_p (i2))
                        i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
                      else
                        gsi_next (&i2);
                      while (1)
                        {
                          new_stmt = gsi_stmt (i2);
                          update_stmt (new_stmt);
                          cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                             new_stmt);

                          if (new_stmt == gsi_stmt (gsi))
                            {
                              /* It is okay to check only for the very last
                                 of these statements.  If it is a throwing
                                 statement nothing will change.  If it isn't
                                 this can remove EH edges.  If that weren't
                                 correct, i.e. if some intermediate statement
                                 could throw but not the last one, we'd have
                                 to split the block, which we can't do here
                                 and we'd lose anyway.  And as builtins
                                 probably never throw, this all is moot
                                 anyway.  */
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
                                                                  new_stmt))
                                gimple_purge_dead_eh_edges (
                                  BASIC_BLOCK_FOR_FN (cfun, first));
                              break;
                            }
                          gsi_next (&i2);
                        }
                    }
                }
              else if (fold_stmt (&gsi))
                {
                  /* Re-read the statement from GSI as fold_stmt() may
                     have changed it.  */
                  gimple *new_stmt = gsi_stmt (gsi);
                  update_stmt (new_stmt);

                  if (is_gimple_call (old_stmt)
                      || is_gimple_call (new_stmt))
                    cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
                                                       new_stmt);

                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
                    gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
                                                                    first));
                }
            }
      }
}

/* Expand calls to inline functions in the body of FN.  */

unsigned int
optimize_inline_calls (tree fn)
{
  copy_body_data id;
  basic_block bb;
  int last = n_basic_blocks_for_fn (cfun);
  bool inlined_p = false;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.src_node = id.dst_node = cgraph_node::get (fn);
  gcc_assert (id.dst_node->definition);
  id.dst_fn = fn;
  /* Or any functions that aren't finished yet.  */
  if (current_function_decl)
    id.dst_fn = current_function_decl;

  id.copy_decl = copy_decl_maybe_to_var;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = true;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;
  id.statements_to_fold = new hash_set<gimple *>;

  push_gimplify_context ();

  /* We make no attempts to keep dominance info up-to-date.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB_FN (bb, cfun)
    inlined_p |= gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);

  if (flag_checking)
    {
      struct cgraph_edge *e;

      id.dst_node->verify ();

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.dst_node->callees; e; e = e->next_callee)
        gcc_assert (e->inline_failed);
    }

  /* Fold queued statements.  */
  fold_marked_statements (last, id.statements_to_fold);
  delete id.statements_to_fold;

  gcc_assert (!id.debug_stmts.exists ());

  /* If we didn't inline into the function there is nothing to do.  */
  if (!inlined_p)
    return 0;

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);

  delete_unreachable_blocks_update_callgraph (&id);
  if (flag_checking)
    id.dst_node->verify ();

  /* It would be nice to check SSA/CFG/statement consistency here, but it is
     not possible yet: the IPA passes might make various functions not throw,
     and they don't care to proactively update local EH info.  This is done
     later by the fixup_cfg pass, which also executes the verification.  */
  return (TODO_update_ssa
          | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
          | (profile_status_for_fn (cfun) != PROFILE_ABSENT
             ? TODO_rebuild_frequencies : 0));
}

/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);
  enum tree_code_class cl = TREE_CODE_CLASS (code);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (cl)
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL
      || code == OMP_CLAUSE)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
         here.  */
      tree chain = NULL_TREE, new_tree;

      if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
        chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      new_tree = copy_node (*tp);

      *tp = new_tree;

      /* Now, restore the chain, if appropriate.  That will cause
         walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL
          || code == TREE_LIST
          || code == OMP_CLAUSE)
        TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
         have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
        BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == CONSTRUCTOR)
    {
      /* CONSTRUCTOR nodes need special handling because
         we need to duplicate the vector of elements.  */
      tree new_tree;

      new_tree = copy_node (*tp);
      CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
      *tp = new_tree;
    }
  else if (code == STATEMENT_LIST)
    /* We used to just abort on STATEMENT_LIST, but we can run into them
       with statement-expressions (c++/40975).  */
    copy_statement_list (tp);
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  return NULL_TREE;
}
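
/* Illustrative note (not from the original source): copy_tree_r is the
   callback half of a deep copy.  A typical use is

       walk_tree (&expr, copy_tree_r, NULL, NULL);

   which copies every expression node reachable from EXPR while sharing
   types, declarations and constants, since the callback clears
   *WALK_SUBTREES for those classes.  */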

/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be mapped,
   use that one.  Otherwise, create a new node and enter it in ST.  */
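/* Illustrative example (not from the original source): in a tree like

       SAVE_EXPR <f ()> * SAVE_EXPR <f ()>

   both operands are the *same* SAVE_EXPR node, guaranteeing a single
   evaluation of f.  The ST map preserves that sharing in the copy: the
   first visit creates the replacement, later visits reuse it.  */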

static void
remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
{
  tree *n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = st->get (*tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      st->put (*tp, t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      st->put (t, t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = *n;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}

/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
   label, copies the declaration and enters it in the decl map of the
   copy_body_data pointed to by the info field of WI.  */

static tree
mark_local_labels_stmt (gimple_stmt_iterator *gsip,
                        bool *handled_ops_p ATTRIBUTE_UNUSED,
                        struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));

  if (stmt)
    {
      tree decl = gimple_label_label (stmt);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl, id->copy_decl (decl, id));
    }

  return NULL_TREE;
}

static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
                                                  struct walk_stmt_info *wi);

/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map of the copy_body_data pointed to by the info field
   of WI, remaps all local declarations to appropriate replacements in
   gimple operands.  */

static tree
replace_locals_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi->info;
  hash_map<tree, tree> *st = id->decl_map;
  tree *n;
  tree expr = *tp;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (expr) == VAR_DECL
       && !TREE_STATIC (expr))
      || TREE_CODE (expr) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = st->get (expr);

      /* If it's there, remap it.  */
      if (n)
        *tp = *n;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (expr) == STATEMENT_LIST
           || TREE_CODE (expr) == BIND_EXPR
           || TREE_CODE (expr) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (expr) == TARGET_EXPR)
    {
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
         It's OK for this to happen if it was part of a subtree that
         isn't immediately expanded, such as operand 2 of another
         TARGET_EXPR.  */
      if (!TREE_OPERAND (expr, 1))
        {
          TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
          TREE_OPERAND (expr, 3) = NULL_TREE;
        }
    }
  else if (TREE_CODE (expr) == OMP_CLAUSE)
    {
      /* Before the omplower pass completes, some OMP clauses can contain
         sequences that are neither copied by gimple_seq_copy nor walked by
         walk_gimple_seq.  To make copy_gimple_seq_and_replace_locals work
         even in those situations, we have to copy and process them
         explicitly.  */

      if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
        {
          gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
          seq = duplicate_remap_omp_clause_seq (seq, wi);
          OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
        }
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
        {
          gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
          seq = duplicate_remap_omp_clause_seq (seq, wi);
          OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
        }
      else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
        {
          gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
          seq = duplicate_remap_omp_clause_seq (seq, wi);
          OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
          seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
          seq = duplicate_remap_omp_clause_seq (seq, wi);
          OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
        }
    }

  /* Keep iterating.  */
  return NULL_TREE;
}


/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
   Using the decl map of the copy_body_data pointed to by the info field
   of WI, remaps all local declarations to appropriate replacements in
   gimple statements.  */

static tree
replace_locals_stmt (gimple_stmt_iterator *gsip,
                     bool *handled_ops_p ATTRIBUTE_UNUSED,
                     struct walk_stmt_info *wi)
{
  copy_body_data *id = (copy_body_data *) wi->info;
  gimple *gs = gsi_stmt (*gsip);

  if (gbind *stmt = dyn_cast <gbind *> (gs))
    {
      tree block = gimple_bind_block (stmt);

      if (block)
        {
          remap_block (&block, id);
          gimple_bind_set_block (stmt, block);
        }

      /* This will remap a lot of the same decls again, but this should be
         harmless.  */
      if (gimple_bind_vars (stmt))
        gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
                                                 NULL, id));
    }

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Create a copy of SEQ and remap all decls in it.  */

static gimple_seq
duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
{
  if (!seq)
    return NULL;

  /* Any labels in OMP sequences can only be referred to within the
     sequence itself, so we can both mark them and remap them here.  */
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
  gimple_seq copy = gimple_seq_copy (seq);
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
  return copy;
}

/* Copies everything in SEQ and replaces variables and labels local to
   current_function_decl.  */

gimple_seq
copy_gimple_seq_and_replace_locals (gimple_seq seq)
{
  copy_body_data id;
  struct walk_stmt_info wi;
  gimple_seq copy;

  /* There's nothing to do for NULL_TREE.  */
  if (seq == NULL)
    return seq;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  /* Walk the tree once to find local labels.  */
  memset (&wi, 0, sizeof (wi));
  hash_set<tree> visited;
  wi.info = &id;
  wi.pset = &visited;
  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);

  copy = gimple_seq_copy (seq);

  /* Walk the copy, remapping decls.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = &id;
  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  if (id.dependence_map)
    {
      delete id.dependence_map;
      id.dependence_map = NULL;
    }

  return copy;
}


/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

DEBUG_FUNCTION bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}


/* Declare the variables created by the inliner.  Add all the variables
   in VARS to BLOCK.  */

static void
declare_inline_vars (tree block, tree vars)
{
  tree t;
  for (t = vars; t; t = DECL_CHAIN (t))
    {
      DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
      gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
      add_local_decl (cfun, t);
    }

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}

/* Finish up the bookkeeping common to the copy_decl_* routines: COPY is
   a new copy of DECL, which originally lived in ID->src_fn and will now
   be placed in ID->dst_fn.  */

static tree
copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
{
  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
      && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, 0);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != id->src_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = id->dst_fn;

  return copy;
}
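
/* Create a VAR_DECL that stands in for DECL (a PARM_DECL or RESULT_DECL)
   in ID->dst_fn, carrying over the relevant flags.  (Descriptive comment;
   the function below had none.)  */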
static tree
copy_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);

  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Like copy_decl_to_var, but create a return slot object instead of a
   pointer variable for return by invisible reference.  */

static tree
copy_result_decl_to_var (tree decl, copy_body_data *id)
{
  tree copy, type;

  gcc_assert (TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);

  type = TREE_TYPE (decl);
  if (DECL_BY_REFERENCE (decl))
    type = TREE_TYPE (type);

  copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  if (!DECL_BY_REFERENCE (decl))
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

tree
copy_decl_no_change (tree decl, copy_body_data *id)
{
  tree copy;

  copy = copy_node (decl);

  /* The COPY is not abstract; it will be generated in DST_FN.  */
  DECL_ABSTRACT_P (copy) = false;
  lang_hooks.dup_lang_specific_decl (copy);

  /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
     been taken; it's for internal bookkeeping in expand_goto_internal.  */
  if (TREE_CODE (copy) == LABEL_DECL)
    {
      TREE_ADDRESSABLE (copy) = 0;
      LABEL_DECL_UID (copy) = -1;
    }

  return copy_decl_for_dup_finish (id, decl, copy);
}

static tree
copy_decl_maybe_to_var (tree decl, copy_body_data *id)
{
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    return copy_decl_to_var (decl, id);
  else
    return copy_decl_no_change (decl, id);
}

/* Return a copy of the function's argument tree, omitting any argument
   whose index is set in ARGS_TO_SKIP.  */

static tree
copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
                               bitmap args_to_skip, tree *vars)
{
  tree arg, *parg;
  tree new_parm = NULL;
  int i = 0;

  parg = &new_parm;

  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      {
        tree new_tree = remap_decl (arg, id);
        if (TREE_CODE (new_tree) != PARM_DECL)
          new_tree = id->copy_decl (arg, id);
        lang_hooks.dup_lang_specific_decl (new_tree);
        *parg = new_tree;
        parg = &DECL_CHAIN (new_tree);
      }
    else if (!id->decl_map->get (arg))
      {
        /* Make an equivalent VAR_DECL.  If the argument was used as a
           temporary variable later in the function, the uses will be
           replaced by the local variable.  */
        tree var = copy_decl_to_var (arg, id);
        insert_decl_map (id, arg, var);
        /* Declare this new variable.  */
        DECL_CHAIN (var) = *vars;
        *vars = var;
      }
  return new_parm;
}

/* Return a copy of the function's static chain.  */

static tree
copy_static_chain (tree static_chain, copy_body_data * id)
{
  tree *chain_copy, *pvar;

  chain_copy = &static_chain;
  for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
    {
      tree new_tree = remap_decl (*pvar, id);
      lang_hooks.dup_lang_specific_decl (new_tree);
      DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
      *pvar = new_tree;
    }
  return static_chain;
}

/* Return true if the function is allowed to be versioned.
   This is a guard for the versioning functionality.  */

bool
tree_versionable_function_p (tree fndecl)
{
  return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
}
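
/* Illustrative example: a function declared as

       __attribute__ ((noclone)) int keep_me (int);

   is rejected by the predicate above, so IPA transformations such as
   constant propagation will not create specialized clones of it.  */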

/* Delete all unreachable basic blocks and update the callgraph.
   Doing so is somewhat nontrivial because we need to update all clones
   and remove inline functions that become unreachable.  */

static bool
delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
       b != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
    {
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
        {
          gimple_stmt_iterator bsi;

          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
            {
              struct cgraph_edge *e;
              struct cgraph_node *node;

              id->dst_node->remove_stmt_references (gsi_stmt (bsi));

              if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
                  && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
                {
                  if (!e->inline_failed)
                    e->callee->remove_symbol_and_inline_clones (id->dst_node);
                  else
                    e->remove ();
                }
              if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
                  && id->dst_node->clones)
                for (node = id->dst_node->clones; node != id->dst_node;)
                  {
                    node->remove_stmt_references (gsi_stmt (bsi));
                    if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
                        && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
                      {
                        if (!e->inline_failed)
                          e->callee->remove_symbol_and_inline_clones (id->dst_node);
                        else
                          e->remove ();
                      }

                    if (node->clones)
                      node = node->clones;
                    else if (node->next_sibling_clone)
                      node = node->next_sibling_clone;
                    else
                      {
                        while (node != id->dst_node && !node->next_sibling_clone)
                          node = node->clone_of;
                        if (node != id->dst_node)
                          node = node->next_sibling_clone;
                      }
                  }
            }
          delete_basic_block (b);
          changed = true;
        }
    }

  return changed;
}
5605
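/* Both delete_unreachable_blocks_update_callgraph above and
   update_clone_info below walk the clone tree in preorder without
   recursion: descend into node->clones first, then advance through
   node->next_sibling_clone, climbing back via node->clone_of once a
   subtree is exhausted.  */
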
/* Update clone info after duplication.  */

static void
update_clone_info (copy_body_data * id)
{
  struct cgraph_node *node;
  if (!id->dst_node->clones)
    return;
  for (node = id->dst_node->clones; node != id->dst_node;)
    {
      /* First update replace maps to match the new body.  */
      if (node->clone.tree_map)
        {
          unsigned int i;
          for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*node->clone.tree_map)[i];
              walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != id->dst_node && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != id->dst_node)
            node = node->next_sibling_clone;
        }
    }
}

/* Create a copy of a function's tree.
   OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes of the original
   function and the new copied function respectively.  In case we want
   to replace a DECL tree with another tree while duplicating the
   function's body, TREE_MAP represents the mapping between these trees.
   If UPDATE_CLONES is set, the call_stmt fields of edges of clones of
   the function will be updated.

   If non-NULL, ARGS_TO_SKIP determines which function parameters to
   remove from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.  */
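/* Illustrative example (hypothetical names; the actual setup is done by
   the IPA passes that call this function): IPA constant propagation
   might materialize a specialized clone.  Given

       static int f (int a, int b) { return a + b; }

   called only as f (x, 3), a clone created with ARGS_TO_SKIP = {1} and a
   TREE_MAP entry replacing parameter B with the constant 3 behaves like

       static int f.constprop (int a) { return a + 3; }  */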
void
tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
                          bool update_clones, bitmap args_to_skip,
                          bool skip_return, bitmap blocks_to_copy,
                          basic_block new_entry)
{
  struct cgraph_node *old_version_node;
  struct cgraph_node *new_version_node;
  copy_body_data id;
  tree p;
  unsigned i;
  struct ipa_replace_map *replace_info;
  basic_block old_entry_block, bb;
  auto_vec<gimple *, 10> init_stmts;
  tree vars = NULL_TREE;
  bitmap debug_args_to_skip = args_to_skip;

  gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
  DECL_POSSIBLY_INLINED (old_decl) = 1;

  old_version_node = cgraph_node::get (old_decl);
  gcc_checking_assert (old_version_node);
  new_version_node = cgraph_node::get (new_decl);
  gcc_checking_assert (new_version_node);

  /* Copy over debug args.  */
  if (DECL_HAS_DEBUG_ARGS_P (old_decl))
    {
      vec<tree, va_gc> **new_debug_args, **old_debug_args;
      gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
      DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
      old_debug_args = decl_debug_args_lookup (old_decl);
      if (old_debug_args)
        {
          new_debug_args = decl_debug_args_insert (new_decl);
          *new_debug_args = vec_safe_copy (*old_debug_args);
        }
    }

  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (old_decl);

  DECL_ARTIFICIAL (new_decl) = 1;
  DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
  if (DECL_ORIGIN (old_decl) == old_decl)
    old_version_node->used_as_abstract_origin = true;
  DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);

  /* Prepare the data structures for the tree copy.  */
  memset (&id, 0, sizeof (id));

  /* Generate a new name for the new version.  */
  id.statements_to_fold = new hash_set<gimple *>;

  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.src_fn = old_decl;
  id.dst_fn = new_decl;
  id.src_node = old_version_node;
  id.dst_node = new_version_node;
  id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
  id.blocks_to_copy = blocks_to_copy;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges
    = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
  id.transform_new_cfg = true;
  id.transform_return_to_modify = false;
  id.transform_parameter = false;
  id.transform_lang_insert_block = NULL;

  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
    (DECL_STRUCT_FUNCTION (old_decl));
  DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
  DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
  initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
  if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
    DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
      = id.src_cfun->gimple_df->ipa_pta;

  /* Copy the function's static chain.  */
  p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
  if (p)
    DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
      = copy_static_chain (p, &id);

  /* If there's a tree_map, prepare for substitution.  */
  if (tree_map)
    for (i = 0; i < tree_map->length (); i++)
      {
        gimple *init;
        replace_info = (*tree_map)[i];
        if (replace_info->replace_p)
          {
            int parm_num = -1;
            if (!replace_info->old_tree)
              {
                int p = replace_info->parm_num;
                tree parm;
                tree req_type, new_type;

                for (parm = DECL_ARGUMENTS (old_decl); p;
                     parm = DECL_CHAIN (parm))
                  p--;
                replace_info->old_tree = parm;
                parm_num = replace_info->parm_num;
                req_type = TREE_TYPE (parm);
                new_type = TREE_TYPE (replace_info->new_tree);
                if (!useless_type_conversion_p (req_type, new_type))
                  {
                    if (fold_convertible_p (req_type, replace_info->new_tree))
                      replace_info->new_tree
                        = fold_build1 (NOP_EXPR, req_type,
                                       replace_info->new_tree);
                    else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
                      replace_info->new_tree
                        = fold_build1 (VIEW_CONVERT_EXPR, req_type,
                                       replace_info->new_tree);
                    else
                      {
                        if (dump_file)
                          {
                            fprintf (dump_file, "    const ");
                            print_generic_expr (dump_file,
                                                replace_info->new_tree, 0);
                            fprintf (dump_file,
                                     "  can't be converted to param ");
                            print_generic_expr (dump_file, parm, 0);
                            fprintf (dump_file, "\n");
                          }
                        replace_info->old_tree = NULL;
                      }
                  }
              }
            else
              gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
            if (replace_info->old_tree)
              {
                init = setup_one_parameter (&id, replace_info->old_tree,
                                            replace_info->new_tree, id.src_fn,
                                            NULL,
                                            &vars);
                if (init)
                  init_stmts.safe_push (init);
                if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
                  {
                    if (parm_num == -1)
                      {
                        tree parm;
                        int p;
                        for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
                             parm = DECL_CHAIN (parm), p++)
                          if (parm == replace_info->old_tree)
                            {
                              parm_num = p;
                              break;
                            }
                      }
                    if (parm_num != -1)
                      {
                        if (debug_args_to_skip == args_to_skip)
                          {
                            debug_args_to_skip = BITMAP_ALLOC (NULL);
                            bitmap_copy (debug_args_to_skip, args_to_skip);
                          }
                        bitmap_clear_bit (debug_args_to_skip, parm_num);
                      }
                  }
              }
          }
      }
  /* Copy the function's arguments.  */
  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
    DECL_ARGUMENTS (new_decl)
      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
                                       args_to_skip, &vars);

  DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;

  declare_inline_vars (DECL_INITIAL (new_decl), vars);

  if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
    /* Add local vars.  */
    add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);

  if (DECL_RESULT (old_decl) == NULL_TREE)
    ;
  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
    {
      DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
      DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
      cfun->returns_struct = 0;
      cfun->returns_pcc_struct = 0;
    }
  else
    {
      tree old_name;
      DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
      lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
      if (gimple_in_ssa_p (id.src_cfun)
          && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
          && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
        {
          tree new_name = make_ssa_name (DECL_RESULT (new_decl));
          insert_decl_map (&id, old_name, new_name);
          SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
          set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
        }
    }

  /* Set up the destination function's loop tree.  */
  if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      cfun->curr_properties |= PROP_loops;
    }

  /* Copy the function's body.  */
  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
             ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);

  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (new_decl);

  /* We want to create the BB unconditionally, so that the addition of
     debug stmts doesn't affect BB count, which may in the end cause
     codegen differences.  */
  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  while (init_stmts.length ())
    insert_init_stmt (&id, bb, init_stmts.pop ());
  update_clone_info (&id);

  /* Remap the nonlocal_goto_save_area, if any.  */
  if (cfun->nonlocal_goto_save_area)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = &id;
      walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
    }

  /* Clean up.  */
  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  fold_marked_statements (0, id.statements_to_fold);
  delete id.statements_to_fold;
  delete_unreachable_blocks_update_callgraph (&id);
  if (id.dst_node->definition)
    cgraph_edge::rebuild_references ();
  if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    {
      calculate_dominance_info (CDI_DOMINATORS);
      fix_loop_structure (NULL);
    }
  update_ssa (TODO_update_ssa);

  /* After partial cloning we need to rescale frequencies, so they are
     within proper range in the cloned function.  */
  if (new_entry)
    {
      struct cgraph_edge *e;
      rebuild_frequencies ();

      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
      for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
          e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
                                                         bb);
          e->count = bb->count;
        }
    }

  if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
    {
      tree parm;
      vec<tree, va_gc> **debug_args = NULL;
      unsigned int len = 0;
      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
           parm; parm = DECL_CHAIN (parm), i++)
        if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
          {
            tree ddecl;

            if (debug_args == NULL)
              {
                debug_args = decl_debug_args_insert (new_decl);
                len = vec_safe_length (*debug_args);
              }
            ddecl = make_node (DEBUG_EXPR_DECL);
            DECL_ARTIFICIAL (ddecl) = 1;
            TREE_TYPE (ddecl) = TREE_TYPE (parm);
            DECL_MODE (ddecl) = DECL_MODE (parm);
            vec_safe_push (*debug_args, DECL_ORIGIN (parm));
            vec_safe_push (*debug_args, ddecl);
          }
      if (debug_args != NULL)
        {
          /* On the callee side, add
               DEBUG D#Y s=> parm
               DEBUG var => D#Y
             stmts to the first bb where var is a VAR_DECL created for the
             optimized away parameter in DECL_INITIAL block.  This hints
             in the debug info that var (whose DECL_ORIGIN is the parm
             PARM_DECL) is optimized away, but could be looked up at the
             call site as value of D#X there.  */
          tree var = vars, vexpr;
          gimple_stmt_iterator cgsi
            = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gimple *def_temp;
          var = vars;
          i = vec_safe_length (*debug_args);
          do
            {
              i -= 2;
              while (var != NULL_TREE
                     && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
                var = TREE_CHAIN (var);
              if (var == NULL_TREE)
                break;
              vexpr = make_node (DEBUG_EXPR_DECL);
              parm = (**debug_args)[i];
              DECL_ARTIFICIAL (vexpr) = 1;
              TREE_TYPE (vexpr) = TREE_TYPE (parm);
              DECL_MODE (vexpr) = DECL_MODE (parm);
              def_temp = gimple_build_debug_bind (var, vexpr, NULL);
              gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
              def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
              gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
            }
          while (i > len);
        }
    }

  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
    BITMAP_FREE (debug_args_to_skip);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  gcc_assert (!id.debug_stmts.exists ());
  pop_cfun ();
  return;
}

/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
   the callee and return the inlined body on success.  */
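/* Illustrative sketch (not from the original source): for a call to a
   simple "const" function such as

       __attribute__ ((const)) int twice (int x) { return 2 * x; }

   a front end can use this helper to replace the GENERIC tree for
   'twice (a)' with the expression '2 * a' without going through the
   GIMPLE inliner.  Whether a given function qualifies depends on
   TREE_READONLY and DECL_SAVED_TREE, as checked below.  */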

tree
maybe_inline_call_in_expr (tree exp)
{
  tree fn = get_callee_fndecl (exp);

  /* We can only try to inline "const" functions.  */
  if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
    {
      call_expr_arg_iterator iter;
      copy_body_data id;
      tree param, arg, t;
      hash_map<tree, tree> decl_map;

      /* Remap the parameters.  */
      for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
           param;
           param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
        decl_map.put (param, arg);

      memset (&id, 0, sizeof (id));
      id.src_fn = fn;
      id.dst_fn = current_function_decl;
      id.src_cfun = DECL_STRUCT_FUNCTION (fn);
      id.decl_map = &decl_map;

      id.copy_decl = copy_decl_no_change;
      id.transform_call_graph_edges = CB_CGE_DUPLICATE;
      id.transform_new_cfg = false;
      id.transform_return_to_modify = true;
      id.transform_parameter = true;
      id.transform_lang_insert_block = NULL;

      /* Make sure not to unshare trees behind the front-end's back
         since front-end specific mechanisms may rely on sharing.  */
      id.regimplify = false;
      id.do_not_unshare = true;

      /* We're not inside any EH region.  */
      id.eh_lp_nr = 0;

      t = copy_tree_body (&id);

      /* We can only return something suitable for use in a GENERIC
         expression tree.  */
      if (TREE_CODE (t) == MODIFY_EXPR)
        return TREE_OPERAND (t, 1);
    }

  return NULL_TREE;
}

/* Duplicate a type, fields and all.  */

tree
build_duplicate_type (tree type)
{
  struct copy_body_data id;

  memset (&id, 0, sizeof (id));
  id.src_fn = current_function_decl;
  id.dst_fn = current_function_decl;
  id.src_cfun = cfun;
  id.decl_map = new hash_map<tree, tree>;
  id.debug_map = NULL;
  id.copy_decl = copy_decl_no_change;

  type = remap_type_1 (type, &id);

  delete id.decl_map;
  if (id.debug_map)
    delete id.debug_map;

  TYPE_CANONICAL (type) = type;

  return type;
}

/* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
   parameters and RESULT_DECL in PARMS and RESULT.  Used by C++ constexpr
   evaluation.  */
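/* Illustrative sketch of the intended use (the caller lives in the C++
   front end): when evaluating

       constexpr int cube (int n) { return n * n * n; }
       constexpr int k = cube (3);

   the front end calls copy_fn to get a private, unshared copy of cube's
   body plus remapped PARM_DECL/RESULT_DECL nodes, so the evaluator can
   bind n to 3 and fold the copy without mutating the original
   definition.  */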

tree
copy_fn (tree fn, tree& parms, tree& result)
{
  copy_body_data id;
  tree param;
  hash_map<tree, tree> decl_map;

  tree *p = &parms;
  *p = NULL_TREE;

  memset (&id, 0, sizeof (id));
  id.src_fn = fn;
  id.dst_fn = current_function_decl;
  id.src_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = &decl_map;

  id.copy_decl = copy_decl_no_change;
  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  id.transform_new_cfg = false;
  id.transform_return_to_modify = false;
  id.transform_parameter = true;
  id.transform_lang_insert_block = NULL;

  /* Make sure not to unshare trees behind the front-end's back
     since front-end specific mechanisms may rely on sharing.  */
  id.regimplify = false;
  id.do_not_unshare = true;

  /* We're not inside any EH region.  */
  id.eh_lp_nr = 0;

  /* Remap the parameters and result and return them to the caller.  */
  for (param = DECL_ARGUMENTS (fn);
       param;
       param = DECL_CHAIN (param))
    {
      *p = remap_decl (param, &id);
      p = &DECL_CHAIN (*p);
    }

  if (DECL_RESULT (fn))
    result = remap_decl (DECL_RESULT (fn), &id);
  else
    result = NULL_TREE;

  return copy_tree_body (&id);
}