1 /* Tree inlining.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "tree.h"
27 #include "tree-inline.h"
28 #include "flags.h"
29 #include "params.h"
30 #include "input.h"
31 #include "insn-config.h"
32 #include "hashtab.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-iterator.h"
36 #include "intl.h"
37 #include "gimplify.h"
38 #include "gimple-iterator.h"
39 #include "gimple-walk.h"
40 #include "gimple-ssa.h"
41 #include "tree-cfg.h"
42 #include "tree-phinodes.h"
43 #include "ssa-iterators.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
46 #include "tree-dfa.h"
47 #include "tree-ssa.h"
48 #include "function.h"
49 #include "tree-pretty-print.h"
50 #include "except.h"
51 #include "debug.h"
52 #include "pointer-set.h"
53 #include "ipa-prop.h"
54 #include "value-prof.h"
55 #include "tree-pass.h"
56 #include "target.h"
57 #include "cfgloop.h"
58
59 #include "rtl.h" /* FIXME: For asm_str_count. */
60
    61 /* I'm not really happy about this, but we need to handle gimple and
    62    non-gimple trees.  */
63
64 /* Inlining, Cloning, Versioning, Parallelization
65
66 Inlining: a function body is duplicated, but the PARM_DECLs are
67 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
68 MODIFY_EXPRs that store to a dedicated returned-value variable.
69 The duplicated eh_region info of the copy will later be appended
70 to the info for the caller; the eh_region info in copied throwing
71 statements and RESX statements are adjusted accordingly.
72
73 Cloning: (only in C++) We have one body for a con/de/structor, and
74 multiple function decls, each with a unique parameter list.
75 Duplicate the body, using the given splay tree; some parameters
76 will become constants (like 0 or 1).
77
    78    Versioning: a function body is duplicated and the result is a new
    79    function, rather than being inserted into blocks of an existing
    80    function as with inlining.  Some parameters will become constants.
81
82 Parallelization: a region of a function is duplicated resulting in
83 a new function. Variables may be replaced with complex expressions
84 to enable shared variable semantics.
85
    86    All of these will simultaneously look up any callgraph edges.  If
87 we're going to inline the duplicated function body, and the given
88 function has some cloned callgraph nodes (one for each place this
89 function will be inlined) those callgraph edges will be duplicated.
90 If we're cloning the body, those callgraph edges will be
91 updated to point into the new body. (Note that the original
92 callgraph node and edge list will not be altered.)
93
94 See the CALL_EXPR handling case in copy_tree_body_r (). */
95
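/* For illustration only (not part of the original comments): a rough,
   source-level sketch of the inlining transformation described above.
   The temporary names x.1 and retval.1 are made up for this example.

     int callee (int x) { return x + 1; }
     ...
     y = callee (3);

   conceptually becomes, after the PARM_DECL is remapped to a VAR_DECL
   and the RETURN_EXPR is turned into a MODIFY_EXPR of the dedicated
   returned-value variable:

     { int x.1 = 3;  retval.1 = x.1 + 1; }
     y = retval.1;  */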
96 /* To Do:
97
98 o In order to make inlining-on-trees work, we pessimized
99 function-local static constants. In particular, they are now
100 always output, even when not addressed. Fix this by treating
101 function-local static constants just like global static
102 constants; the back-end already knows not to output them if they
103 are not needed.
104
105 o Provide heuristics to clamp inlining of recursive template
106 calls? */
107
108
109 /* Weights that estimate_num_insns uses to estimate the size of the
110 produced code. */
111
112 eni_weights eni_size_weights;
113
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
116
117 eni_weights eni_time_weights;
118
119 /* Prototypes. */
120
121 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static void declare_inline_vars (tree, tree);
125 static void remap_save_expr (tree *, void *, int *);
126 static void prepend_lexical_block (tree current_block, tree new_block);
127 static tree copy_decl_to_var (tree, copy_body_data *);
128 static tree copy_result_decl_to_var (tree, copy_body_data *);
129 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
130 static gimple remap_gimple_stmt (gimple, copy_body_data *);
131 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
132
   133 /* Insert a tree->tree mapping for ID.  Although the name suggests
   134    that the trees should be variables, it is used for more than that.  */
135
136 void
137 insert_decl_map (copy_body_data *id, tree key, tree value)
138 {
139 *pointer_map_insert (id->decl_map, key) = value;
140
141 /* Always insert an identity map as well. If we see this same new
142 node again, we won't want to duplicate it a second time. */
143 if (key != value)
144 *pointer_map_insert (id->decl_map, value) = value;
145 }
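/* Illustrative sketch (not in the original file): when inlining remaps a
   PARM_DECL "old_parm" to a fresh VAR_DECL "new_var", a call such as

     insert_decl_map (id, old_parm, new_var);

   leaves two entries in id->decl_map: old_parm -> new_var plus the
   identity mapping new_var -> new_var, so that revisiting the new node
   does not duplicate it a second time.  */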
146
147 /* Insert a tree->tree mapping for ID. This is only used for
148 variables. */
149
150 static void
151 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
152 {
153 if (!gimple_in_ssa_p (id->src_cfun))
154 return;
155
156 if (!MAY_HAVE_DEBUG_STMTS)
157 return;
158
159 if (!target_for_debug_bind (key))
160 return;
161
162 gcc_assert (TREE_CODE (key) == PARM_DECL);
163 gcc_assert (TREE_CODE (value) == VAR_DECL);
164
165 if (!id->debug_map)
166 id->debug_map = pointer_map_create ();
167
168 *pointer_map_insert (id->debug_map, key) = value;
169 }
170
171 /* If nonzero, we're remapping the contents of inlined debug
172 statements. If negative, an error has occurred, such as a
173 reference to a variable that isn't available in the inlined
174 context. */
175 static int processing_debug_stmt = 0;
176
177 /* Construct new SSA name for old NAME. ID is the inline context. */
178
179 static tree
180 remap_ssa_name (tree name, copy_body_data *id)
181 {
182 tree new_tree, var;
183 tree *n;
184
185 gcc_assert (TREE_CODE (name) == SSA_NAME);
186
187 n = (tree *) pointer_map_contains (id->decl_map, name);
188 if (n)
189 return unshare_expr (*n);
190
191 if (processing_debug_stmt)
192 {
193 if (SSA_NAME_IS_DEFAULT_DEF (name)
194 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
195 && id->entry_bb == NULL
196 && single_succ_p (ENTRY_BLOCK_PTR))
197 {
198 tree vexpr = make_node (DEBUG_EXPR_DECL);
199 gimple def_temp;
200 gimple_stmt_iterator gsi;
201 tree val = SSA_NAME_VAR (name);
202
203 n = (tree *) pointer_map_contains (id->decl_map, val);
204 if (n != NULL)
205 val = *n;
206 if (TREE_CODE (val) != PARM_DECL)
207 {
208 processing_debug_stmt = -1;
209 return name;
210 }
211 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
212 DECL_ARTIFICIAL (vexpr) = 1;
213 TREE_TYPE (vexpr) = TREE_TYPE (name);
214 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
215 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
216 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
217 return vexpr;
218 }
219
220 processing_debug_stmt = -1;
221 return name;
222 }
223
224 /* Remap anonymous SSA names or SSA names of anonymous decls. */
225 var = SSA_NAME_VAR (name);
226 if (!var
227 || (!SSA_NAME_IS_DEFAULT_DEF (name)
228 && TREE_CODE (var) == VAR_DECL
229 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
230 && DECL_ARTIFICIAL (var)
231 && DECL_IGNORED_P (var)
232 && !DECL_NAME (var)))
233 {
234 struct ptr_info_def *pi;
235 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id), NULL);
236 if (!var && SSA_NAME_IDENTIFIER (name))
237 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
238 insert_decl_map (id, name, new_tree);
239 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
240 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
241 /* At least IPA points-to info can be directly transferred. */
242 if (id->src_cfun->gimple_df
243 && id->src_cfun->gimple_df->ipa_pta
244 && (pi = SSA_NAME_PTR_INFO (name))
245 && !pi->pt.anything)
246 {
247 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
248 new_pi->pt = pi->pt;
249 }
250 return new_tree;
251 }
252
253 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
254 in copy_bb. */
255 new_tree = remap_decl (var, id);
256
   257   /* We might have substituted a constant or another SSA_NAME for
   258      the variable.
   259
   260      Replace the SSA name representing the RESULT_DECL with the variable
   261      during inlining: this saves us from having to introduce a PHI node
   262      when the return value is only partly initialized.  */
263 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
264 && (!SSA_NAME_VAR (name)
265 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
266 || !id->transform_return_to_modify))
267 {
268 struct ptr_info_def *pi;
269 new_tree = make_ssa_name (new_tree, NULL);
270 insert_decl_map (id, name, new_tree);
271 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
272 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
273 /* At least IPA points-to info can be directly transferred. */
274 if (id->src_cfun->gimple_df
275 && id->src_cfun->gimple_df->ipa_pta
276 && (pi = SSA_NAME_PTR_INFO (name))
277 && !pi->pt.anything)
278 {
279 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
280 new_pi->pt = pi->pt;
281 }
282 if (SSA_NAME_IS_DEFAULT_DEF (name))
283 {
   284          /* By inlining a function with an uninitialized variable, we might
   285             extend its lifetime (the variable might get reused).  This causes
   286             an ICE if we end up extending the lifetime of an SSA name across
   287             an abnormal edge, and it also increases register pressure.
   288
   289             We simply initialize all uninitialized vars to 0, except when
   290             we are inlining into the very first BB.  We could avoid this
   291             for all BBs that are not inside strongly connected regions
   292             of the CFG, but that is expensive to test.  */
293 if (id->entry_bb
294 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
295 && (!SSA_NAME_VAR (name)
296 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
297 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
298 || EDGE_COUNT (id->entry_bb->preds) != 1))
299 {
300 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
301 gimple init_stmt;
302 tree zero = build_zero_cst (TREE_TYPE (new_tree));
303
304 init_stmt = gimple_build_assign (new_tree, zero);
305 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
306 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
307 }
308 else
309 {
310 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
311 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
312 }
313 }
314 }
315 else
316 insert_decl_map (id, name, new_tree);
317 return new_tree;
318 }
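/* Illustrative note (not in the original file): the zero-initialization
   above matters for cases such as

     int callee (void) { int u; return u; }

   where u is never initialized.  When callee is inlined somewhere other
   than the caller's very first basic block, the default definition of
   u's SSA name may be replaced by an explicit assignment "u_N = 0" in
   id->entry_bb, so that an uninitialized SSA name does not have its
   lifetime extended across an abnormal edge.  */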
319
320 /* Remap DECL during the copying of the BLOCK tree for the function. */
321
322 tree
323 remap_decl (tree decl, copy_body_data *id)
324 {
325 tree *n;
326
327 /* We only remap local variables in the current function. */
328
329 /* See if we have remapped this declaration. */
330
331 n = (tree *) pointer_map_contains (id->decl_map, decl);
332
333 if (!n && processing_debug_stmt)
334 {
335 processing_debug_stmt = -1;
336 return decl;
337 }
338
339 /* If we didn't already have an equivalent for this declaration,
340 create one now. */
341 if (!n)
342 {
343 /* Make a copy of the variable or label. */
344 tree t = id->copy_decl (decl, id);
345
346 /* Remember it, so that if we encounter this local entity again
347 we can reuse this copy. Do this early because remap_type may
348 need this decl for TYPE_STUB_DECL. */
349 insert_decl_map (id, decl, t);
350
351 if (!DECL_P (t))
352 return t;
353
354 /* Remap types, if necessary. */
355 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
356 if (TREE_CODE (t) == TYPE_DECL)
357 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
358
359 /* Remap sizes as necessary. */
360 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
361 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
362
363 /* If fields, do likewise for offset and qualifier. */
364 if (TREE_CODE (t) == FIELD_DECL)
365 {
366 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
367 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
368 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
369 }
370
371 return t;
372 }
373
374 if (id->do_not_unshare)
375 return *n;
376 else
377 return unshare_expr (*n);
378 }
379
380 static tree
381 remap_type_1 (tree type, copy_body_data *id)
382 {
383 tree new_tree, t;
384
   385   /* We do need a copy.  Build and register it now.  If this is a pointer or
386 reference type, remap the designated type and make a new pointer or
387 reference type. */
388 if (TREE_CODE (type) == POINTER_TYPE)
389 {
390 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
391 TYPE_MODE (type),
392 TYPE_REF_CAN_ALIAS_ALL (type));
393 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
394 new_tree = build_type_attribute_qual_variant (new_tree,
395 TYPE_ATTRIBUTES (type),
396 TYPE_QUALS (type));
397 insert_decl_map (id, type, new_tree);
398 return new_tree;
399 }
400 else if (TREE_CODE (type) == REFERENCE_TYPE)
401 {
402 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
403 TYPE_MODE (type),
404 TYPE_REF_CAN_ALIAS_ALL (type));
405 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
406 new_tree = build_type_attribute_qual_variant (new_tree,
407 TYPE_ATTRIBUTES (type),
408 TYPE_QUALS (type));
409 insert_decl_map (id, type, new_tree);
410 return new_tree;
411 }
412 else
413 new_tree = copy_node (type);
414
415 insert_decl_map (id, type, new_tree);
416
417 /* This is a new type, not a copy of an old type. Need to reassociate
418 variants. We can handle everything except the main variant lazily. */
419 t = TYPE_MAIN_VARIANT (type);
420 if (type != t)
421 {
422 t = remap_type (t, id);
423 TYPE_MAIN_VARIANT (new_tree) = t;
424 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
425 TYPE_NEXT_VARIANT (t) = new_tree;
426 }
427 else
428 {
429 TYPE_MAIN_VARIANT (new_tree) = new_tree;
430 TYPE_NEXT_VARIANT (new_tree) = NULL;
431 }
432
433 if (TYPE_STUB_DECL (type))
434 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
435
436 /* Lazily create pointer and reference types. */
437 TYPE_POINTER_TO (new_tree) = NULL;
438 TYPE_REFERENCE_TO (new_tree) = NULL;
439
440 switch (TREE_CODE (new_tree))
441 {
442 case INTEGER_TYPE:
443 case REAL_TYPE:
444 case FIXED_POINT_TYPE:
445 case ENUMERAL_TYPE:
446 case BOOLEAN_TYPE:
447 t = TYPE_MIN_VALUE (new_tree);
448 if (t && TREE_CODE (t) != INTEGER_CST)
449 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
450
451 t = TYPE_MAX_VALUE (new_tree);
452 if (t && TREE_CODE (t) != INTEGER_CST)
453 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
454 return new_tree;
455
456 case FUNCTION_TYPE:
457 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
458 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
459 return new_tree;
460
461 case ARRAY_TYPE:
462 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
463 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
464 break;
465
466 case RECORD_TYPE:
467 case UNION_TYPE:
468 case QUAL_UNION_TYPE:
469 {
470 tree f, nf = NULL;
471
472 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
473 {
474 t = remap_decl (f, id);
475 DECL_CONTEXT (t) = new_tree;
476 DECL_CHAIN (t) = nf;
477 nf = t;
478 }
479 TYPE_FIELDS (new_tree) = nreverse (nf);
480 }
481 break;
482
483 case OFFSET_TYPE:
484 default:
485 /* Shouldn't have been thought variable sized. */
486 gcc_unreachable ();
487 }
488
489 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
490 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
491
492 return new_tree;
493 }
494
495 tree
496 remap_type (tree type, copy_body_data *id)
497 {
498 tree *node;
499 tree tmp;
500
501 if (type == NULL)
502 return type;
503
504 /* See if we have remapped this type. */
505 node = (tree *) pointer_map_contains (id->decl_map, type);
506 if (node)
507 return *node;
508
509 /* The type only needs remapping if it's variably modified. */
510 if (! variably_modified_type_p (type, id->src_fn))
511 {
512 insert_decl_map (id, type, type);
513 return type;
514 }
515
516 id->remapping_type_depth++;
517 tmp = remap_type_1 (type, id);
518 id->remapping_type_depth--;
519
520 return tmp;
521 }
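/* Illustrative note (not in the original file): remap_type only makes a
   real copy for variably modified types, e.g. the array type in

     void callee (int n) { char buf[n]; }

   whose TYPE_SIZE refers to the parameter n of the source function and
   must therefore be rewritten in terms of the remapped copy of n.
   Ordinary types such as plain int are simply mapped to themselves.  */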
522
523 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
524
525 static bool
526 can_be_nonlocal (tree decl, copy_body_data *id)
527 {
   528   /* We cannot duplicate function decls.  */
529 if (TREE_CODE (decl) == FUNCTION_DECL)
530 return true;
531
532 /* Local static vars must be non-local or we get multiple declaration
533 problems. */
534 if (TREE_CODE (decl) == VAR_DECL
535 && !auto_var_in_fn_p (decl, id->src_fn))
536 return true;
537
538 return false;
539 }
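/* Illustrative note (not in the original file): a local static such as

     int counter (void) { static int n; return ++n; }

   must not be duplicated when counter is inlined; there is only one n,
   no matter how many inlined copies of the body exist, so the decl is
   kept non-local instead of being remapped.  */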
540
541 static tree
542 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
543 copy_body_data *id)
544 {
545 tree old_var;
546 tree new_decls = NULL_TREE;
547
548 /* Remap its variables. */
549 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
550 {
551 tree new_var;
552
553 if (can_be_nonlocal (old_var, id))
554 {
555 /* We need to add this variable to the local decls as otherwise
556 nothing else will do so. */
557 if (TREE_CODE (old_var) == VAR_DECL
558 && ! DECL_EXTERNAL (old_var))
559 add_local_decl (cfun, old_var);
560 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
561 && !DECL_IGNORED_P (old_var)
562 && nonlocalized_list)
563 vec_safe_push (*nonlocalized_list, old_var);
564 continue;
565 }
566
567 /* Remap the variable. */
568 new_var = remap_decl (old_var, id);
569
570 /* If we didn't remap this variable, we can't mess with its
571 TREE_CHAIN. If we remapped this variable to the return slot, it's
572 already declared somewhere else, so don't declare it here. */
573
574 if (new_var == id->retvar)
575 ;
576 else if (!new_var)
577 {
578 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
579 && !DECL_IGNORED_P (old_var)
580 && nonlocalized_list)
581 vec_safe_push (*nonlocalized_list, old_var);
582 }
583 else
584 {
585 gcc_assert (DECL_P (new_var));
586 DECL_CHAIN (new_var) = new_decls;
587 new_decls = new_var;
588
589 /* Also copy value-expressions. */
590 if (TREE_CODE (new_var) == VAR_DECL
591 && DECL_HAS_VALUE_EXPR_P (new_var))
592 {
593 tree tem = DECL_VALUE_EXPR (new_var);
594 bool old_regimplify = id->regimplify;
595 id->remapping_type_depth++;
596 walk_tree (&tem, copy_tree_body_r, id, NULL);
597 id->remapping_type_depth--;
598 id->regimplify = old_regimplify;
599 SET_DECL_VALUE_EXPR (new_var, tem);
600 }
601 }
602 }
603
604 return nreverse (new_decls);
605 }
606
607 /* Copy the BLOCK to contain remapped versions of the variables
608 therein. And hook the new block into the block-tree. */
609
610 static void
611 remap_block (tree *block, copy_body_data *id)
612 {
613 tree old_block;
614 tree new_block;
615
616 /* Make the new block. */
617 old_block = *block;
618 new_block = make_node (BLOCK);
619 TREE_USED (new_block) = TREE_USED (old_block);
620 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
621 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
622 BLOCK_NONLOCALIZED_VARS (new_block)
623 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
624 *block = new_block;
625
626 /* Remap its variables. */
627 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
628 &BLOCK_NONLOCALIZED_VARS (new_block),
629 id);
630
631 if (id->transform_lang_insert_block)
632 id->transform_lang_insert_block (new_block);
633
634 /* Remember the remapped block. */
635 insert_decl_map (id, old_block, new_block);
636 }
637
638 /* Copy the whole block tree and root it in id->block. */
639 static tree
640 remap_blocks (tree block, copy_body_data *id)
641 {
642 tree t;
643 tree new_tree = block;
644
645 if (!block)
646 return NULL;
647
648 remap_block (&new_tree, id);
649 gcc_assert (new_tree != block);
650 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
651 prepend_lexical_block (new_tree, remap_blocks (t, id));
652 /* Blocks are in arbitrary order, but make things slightly prettier and do
653 not swap order when producing a copy. */
654 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
655 return new_tree;
656 }
657
658 /* Remap the block tree rooted at BLOCK to nothing. */
659 static void
660 remap_blocks_to_null (tree block, copy_body_data *id)
661 {
662 tree t;
663 insert_decl_map (id, block, NULL_TREE);
664 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
665 remap_blocks_to_null (t, id);
666 }
667
668 static void
669 copy_statement_list (tree *tp)
670 {
671 tree_stmt_iterator oi, ni;
672 tree new_tree;
673
674 new_tree = alloc_stmt_list ();
675 ni = tsi_start (new_tree);
676 oi = tsi_start (*tp);
677 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
678 *tp = new_tree;
679
680 for (; !tsi_end_p (oi); tsi_next (&oi))
681 {
682 tree stmt = tsi_stmt (oi);
683 if (TREE_CODE (stmt) == STATEMENT_LIST)
684 /* This copy is not redundant; tsi_link_after will smash this
685 STATEMENT_LIST into the end of the one we're building, and we
686 don't want to do that with the original. */
687 copy_statement_list (&stmt);
688 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
689 }
690 }
691
692 static void
693 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
694 {
695 tree block = BIND_EXPR_BLOCK (*tp);
696 /* Copy (and replace) the statement. */
697 copy_tree_r (tp, walk_subtrees, NULL);
698 if (block)
699 {
700 remap_block (&block, id);
701 BIND_EXPR_BLOCK (*tp) = block;
702 }
703
704 if (BIND_EXPR_VARS (*tp))
705 /* This will remap a lot of the same decls again, but this should be
706 harmless. */
707 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
708 }
709
710
711 /* Create a new gimple_seq by remapping all the statements in BODY
712 using the inlining information in ID. */
713
714 static gimple_seq
715 remap_gimple_seq (gimple_seq body, copy_body_data *id)
716 {
717 gimple_stmt_iterator si;
718 gimple_seq new_body = NULL;
719
720 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
721 {
722 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
723 gimple_seq_add_stmt (&new_body, new_stmt);
724 }
725
726 return new_body;
727 }
728
729
730 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
731 block using the mapping information in ID. */
732
733 static gimple
734 copy_gimple_bind (gimple stmt, copy_body_data *id)
735 {
736 gimple new_bind;
737 tree new_block, new_vars;
738 gimple_seq body, new_body;
739
740 /* Copy the statement. Note that we purposely don't use copy_stmt
741 here because we need to remap statements as we copy. */
742 body = gimple_bind_body (stmt);
743 new_body = remap_gimple_seq (body, id);
744
745 new_block = gimple_bind_block (stmt);
746 if (new_block)
747 remap_block (&new_block, id);
748
749 /* This will remap a lot of the same decls again, but this should be
750 harmless. */
751 new_vars = gimple_bind_vars (stmt);
752 if (new_vars)
753 new_vars = remap_decls (new_vars, NULL, id);
754
755 new_bind = gimple_build_bind (new_vars, new_body, new_block);
756
757 return new_bind;
758 }
759
760 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
761
762 static bool
763 is_parm (tree decl)
764 {
765 if (TREE_CODE (decl) == SSA_NAME)
766 {
767 decl = SSA_NAME_VAR (decl);
768 if (!decl)
769 return false;
770 }
771
772 return (TREE_CODE (decl) == PARM_DECL);
773 }
774
775 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
776 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
   777    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   778    recursing into the child nodes of *TP.  */
779
780 static tree
781 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
782 {
783 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
784 copy_body_data *id = (copy_body_data *) wi_p->info;
785 tree fn = id->src_fn;
786
787 if (TREE_CODE (*tp) == SSA_NAME)
788 {
789 *tp = remap_ssa_name (*tp, id);
790 *walk_subtrees = 0;
791 return NULL;
792 }
793 else if (auto_var_in_fn_p (*tp, fn))
794 {
795 /* Local variables and labels need to be replaced by equivalent
796 variables. We don't want to copy static variables; there's
797 only one of those, no matter how many times we inline the
798 containing function. Similarly for globals from an outer
799 function. */
800 tree new_decl;
801
802 /* Remap the declaration. */
803 new_decl = remap_decl (*tp, id);
804 gcc_assert (new_decl);
805 /* Replace this variable with the copy. */
806 STRIP_TYPE_NOPS (new_decl);
807 /* ??? The C++ frontend uses void * pointer zero to initialize
808 any other type. This confuses the middle-end type verification.
809 As cloned bodies do not go through gimplification again the fixup
810 there doesn't trigger. */
811 if (TREE_CODE (new_decl) == INTEGER_CST
812 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
813 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
814 *tp = new_decl;
815 *walk_subtrees = 0;
816 }
817 else if (TREE_CODE (*tp) == STATEMENT_LIST)
818 gcc_unreachable ();
819 else if (TREE_CODE (*tp) == SAVE_EXPR)
820 gcc_unreachable ();
821 else if (TREE_CODE (*tp) == LABEL_DECL
822 && (!DECL_CONTEXT (*tp)
823 || decl_function_context (*tp) == id->src_fn))
824 /* These may need to be remapped for EH handling. */
825 *tp = remap_decl (*tp, id);
826 else if (TREE_CODE (*tp) == FIELD_DECL)
827 {
828 /* If the enclosing record type is variably_modified_type_p, the field
829 has already been remapped. Otherwise, it need not be. */
830 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
831 if (n)
832 *tp = *n;
833 *walk_subtrees = 0;
834 }
835 else if (TYPE_P (*tp))
836 /* Types may need remapping as well. */
837 *tp = remap_type (*tp, id);
838 else if (CONSTANT_CLASS_P (*tp))
839 {
840 /* If this is a constant, we have to copy the node iff the type
841 will be remapped. copy_tree_r will not copy a constant. */
842 tree new_type = remap_type (TREE_TYPE (*tp), id);
843
844 if (new_type == TREE_TYPE (*tp))
845 *walk_subtrees = 0;
846
847 else if (TREE_CODE (*tp) == INTEGER_CST)
848 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
849 TREE_INT_CST_HIGH (*tp));
850 else
851 {
852 *tp = copy_node (*tp);
853 TREE_TYPE (*tp) = new_type;
854 }
855 }
856 else
857 {
858 /* Otherwise, just copy the node. Note that copy_tree_r already
859 knows not to copy VAR_DECLs, etc., so this is safe. */
860
861 if (TREE_CODE (*tp) == MEM_REF)
862 {
863 /* We need to re-canonicalize MEM_REFs from inline substitutions
864 that can happen when a pointer argument is an ADDR_EXPR.
865 Recurse here manually to allow that. */
866 tree ptr = TREE_OPERAND (*tp, 0);
867 tree type = remap_type (TREE_TYPE (*tp), id);
868 tree old = *tp;
869 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
870 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
871 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
872 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
873 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
874 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
875 remapped a parameter as the property might be valid only
876 for the parameter itself. */
877 if (TREE_THIS_NOTRAP (old)
878 && (!is_parm (TREE_OPERAND (old, 0))
879 || (!id->transform_parameter && is_parm (ptr))))
880 TREE_THIS_NOTRAP (*tp) = 1;
881 *walk_subtrees = 0;
882 return NULL;
883 }
884
885 /* Here is the "usual case". Copy this tree node, and then
886 tweak some special cases. */
887 copy_tree_r (tp, walk_subtrees, NULL);
888
889 if (TREE_CODE (*tp) != OMP_CLAUSE)
890 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
891
892 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
893 {
894 /* The copied TARGET_EXPR has never been expanded, even if the
895 original node was expanded already. */
896 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
897 TREE_OPERAND (*tp, 3) = NULL_TREE;
898 }
899 else if (TREE_CODE (*tp) == ADDR_EXPR)
900 {
901 /* Variable substitution need not be simple. In particular,
902 the MEM_REF substitution above. Make sure that
903 TREE_CONSTANT and friends are up-to-date. */
904 int invariant = is_gimple_min_invariant (*tp);
905 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
906 recompute_tree_invariant_for_addr_expr (*tp);
907
908 /* If this used to be invariant, but is not any longer,
909 then regimplification is probably needed. */
910 if (invariant && !is_gimple_min_invariant (*tp))
911 id->regimplify = true;
912
913 *walk_subtrees = 0;
914 }
915 }
916
917 /* Update the TREE_BLOCK for the cloned expr. */
918 if (EXPR_P (*tp))
919 {
920 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
921 tree old_block = TREE_BLOCK (*tp);
922 if (old_block)
923 {
924 tree *n;
925 n = (tree *) pointer_map_contains (id->decl_map,
926 TREE_BLOCK (*tp));
927 if (n)
928 new_block = *n;
929 }
930 TREE_SET_BLOCK (*tp, new_block);
931 }
932
933 /* Keep iterating. */
934 return NULL_TREE;
935 }
936
937
938 /* Called from copy_body_id via walk_tree. DATA is really a
939 `copy_body_data *'. */
940
941 tree
942 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
943 {
944 copy_body_data *id = (copy_body_data *) data;
945 tree fn = id->src_fn;
946 tree new_block;
947
948 /* Begin by recognizing trees that we'll completely rewrite for the
949 inlining context. Our output for these trees is completely
   950      different from our input (e.g. RETURN_EXPR is deleted, and morphs
951 into an edge). Further down, we'll handle trees that get
952 duplicated and/or tweaked. */
953
954 /* When requested, RETURN_EXPRs should be transformed to just the
955 contained MODIFY_EXPR. The branch semantics of the return will
956 be handled elsewhere by manipulating the CFG rather than a statement. */
957 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
958 {
959 tree assignment = TREE_OPERAND (*tp, 0);
960
961 /* If we're returning something, just turn that into an
962 assignment into the equivalent of the original RESULT_DECL.
963 If the "assignment" is just the result decl, the result
964 decl has already been set (e.g. a recent "foo (&result_decl,
965 ...)"); just toss the entire RETURN_EXPR. */
966 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
967 {
968 /* Replace the RETURN_EXPR with (a copy of) the
969 MODIFY_EXPR hanging underneath. */
970 *tp = copy_node (assignment);
971 }
972 else /* Else the RETURN_EXPR returns no value. */
973 {
974 *tp = NULL;
975 return (tree) (void *)1;
976 }
977 }
978 else if (TREE_CODE (*tp) == SSA_NAME)
979 {
980 *tp = remap_ssa_name (*tp, id);
981 *walk_subtrees = 0;
982 return NULL;
983 }
984
985 /* Local variables and labels need to be replaced by equivalent
986 variables. We don't want to copy static variables; there's only
987 one of those, no matter how many times we inline the containing
988 function. Similarly for globals from an outer function. */
989 else if (auto_var_in_fn_p (*tp, fn))
990 {
991 tree new_decl;
992
993 /* Remap the declaration. */
994 new_decl = remap_decl (*tp, id);
995 gcc_assert (new_decl);
996 /* Replace this variable with the copy. */
997 STRIP_TYPE_NOPS (new_decl);
998 *tp = new_decl;
999 *walk_subtrees = 0;
1000 }
1001 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1002 copy_statement_list (tp);
1003 else if (TREE_CODE (*tp) == SAVE_EXPR
1004 || TREE_CODE (*tp) == TARGET_EXPR)
1005 remap_save_expr (tp, id->decl_map, walk_subtrees);
1006 else if (TREE_CODE (*tp) == LABEL_DECL
1007 && (! DECL_CONTEXT (*tp)
1008 || decl_function_context (*tp) == id->src_fn))
1009 /* These may need to be remapped for EH handling. */
1010 *tp = remap_decl (*tp, id);
1011 else if (TREE_CODE (*tp) == BIND_EXPR)
1012 copy_bind_expr (tp, walk_subtrees, id);
1013 /* Types may need remapping as well. */
1014 else if (TYPE_P (*tp))
1015 *tp = remap_type (*tp, id);
1016
1017 /* If this is a constant, we have to copy the node iff the type will be
1018 remapped. copy_tree_r will not copy a constant. */
1019 else if (CONSTANT_CLASS_P (*tp))
1020 {
1021 tree new_type = remap_type (TREE_TYPE (*tp), id);
1022
1023 if (new_type == TREE_TYPE (*tp))
1024 *walk_subtrees = 0;
1025
1026 else if (TREE_CODE (*tp) == INTEGER_CST)
1027 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1028 TREE_INT_CST_HIGH (*tp));
1029 else
1030 {
1031 *tp = copy_node (*tp);
1032 TREE_TYPE (*tp) = new_type;
1033 }
1034 }
1035
1036 /* Otherwise, just copy the node. Note that copy_tree_r already
1037 knows not to copy VAR_DECLs, etc., so this is safe. */
1038 else
1039 {
1040 /* Here we handle trees that are not completely rewritten.
1041 First we detect some inlining-induced bogosities for
1042 discarding. */
1043 if (TREE_CODE (*tp) == MODIFY_EXPR
1044 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1045 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1046 {
1047 /* Some assignments VAR = VAR; don't generate any rtl code
1048 and thus don't count as variable modification. Avoid
1049 keeping bogosities like 0 = 0. */
1050 tree decl = TREE_OPERAND (*tp, 0), value;
1051 tree *n;
1052
1053 n = (tree *) pointer_map_contains (id->decl_map, decl);
1054 if (n)
1055 {
1056 value = *n;
1057 STRIP_TYPE_NOPS (value);
1058 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1059 {
1060 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1061 return copy_tree_body_r (tp, walk_subtrees, data);
1062 }
1063 }
1064 }
1065 else if (TREE_CODE (*tp) == INDIRECT_REF)
1066 {
1067 /* Get rid of *& from inline substitutions that can happen when a
1068 pointer argument is an ADDR_EXPR. */
1069 tree decl = TREE_OPERAND (*tp, 0);
1070 tree *n = (tree *) pointer_map_contains (id->decl_map, decl);
1071 if (n)
1072 {
1073 /* If we happen to get an ADDR_EXPR in n->value, strip
1074 it manually here as we'll eventually get ADDR_EXPRs
  1075                which lie about the types they point to.  In this case
1076 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1077 but we absolutely rely on that. As fold_indirect_ref
1078 does other useful transformations, try that first, though. */
1079 tree type = TREE_TYPE (*tp);
1080 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1081 tree old = *tp;
1082 *tp = gimple_fold_indirect_ref (ptr);
1083 if (! *tp)
1084 {
1085 if (TREE_CODE (ptr) == ADDR_EXPR)
1086 {
1087 *tp
1088 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1089 /* ??? We should either assert here or build
1090 a VIEW_CONVERT_EXPR instead of blindly leaking
1091 incompatible types to our IL. */
1092 if (! *tp)
1093 *tp = TREE_OPERAND (ptr, 0);
1094 }
1095 else
1096 {
1097 *tp = build1 (INDIRECT_REF, type, ptr);
1098 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1099 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1100 TREE_READONLY (*tp) = TREE_READONLY (old);
1101 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1102 have remapped a parameter as the property might be
1103 valid only for the parameter itself. */
1104 if (TREE_THIS_NOTRAP (old)
1105 && (!is_parm (TREE_OPERAND (old, 0))
1106 || (!id->transform_parameter && is_parm (ptr))))
1107 TREE_THIS_NOTRAP (*tp) = 1;
1108 }
1109 }
1110 *walk_subtrees = 0;
1111 return NULL;
1112 }
1113 }
1114 else if (TREE_CODE (*tp) == MEM_REF)
1115 {
1116 /* We need to re-canonicalize MEM_REFs from inline substitutions
1117 that can happen when a pointer argument is an ADDR_EXPR.
1118 Recurse here manually to allow that. */
1119 tree ptr = TREE_OPERAND (*tp, 0);
1120 tree type = remap_type (TREE_TYPE (*tp), id);
1121 tree old = *tp;
1122 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1123 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1124 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1125 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1126 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1127 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1128 remapped a parameter as the property might be valid only
1129 for the parameter itself. */
1130 if (TREE_THIS_NOTRAP (old)
1131 && (!is_parm (TREE_OPERAND (old, 0))
1132 || (!id->transform_parameter && is_parm (ptr))))
1133 TREE_THIS_NOTRAP (*tp) = 1;
1134 *walk_subtrees = 0;
1135 return NULL;
1136 }
1137
1138 /* Here is the "usual case". Copy this tree node, and then
1139 tweak some special cases. */
1140 copy_tree_r (tp, walk_subtrees, NULL);
1141
  1142          /* If EXPR has a block defined, map it to the newly constructed
  1143             block.  When inlining, we want EXPRs without a block to appear
  1144             in the block of the function call if we are not remapping a type.  */
1145 if (EXPR_P (*tp))
1146 {
1147 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1148 if (TREE_BLOCK (*tp))
1149 {
1150 tree *n;
1151 n = (tree *) pointer_map_contains (id->decl_map,
1152 TREE_BLOCK (*tp));
1153 if (n)
1154 new_block = *n;
1155 }
1156 TREE_SET_BLOCK (*tp, new_block);
1157 }
1158
1159 if (TREE_CODE (*tp) != OMP_CLAUSE)
1160 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1161
1162 /* The copied TARGET_EXPR has never been expanded, even if the
1163 original node was expanded already. */
1164 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1165 {
1166 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1167 TREE_OPERAND (*tp, 3) = NULL_TREE;
1168 }
1169
1170 /* Variable substitution need not be simple. In particular, the
1171 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1172 and friends are up-to-date. */
1173 else if (TREE_CODE (*tp) == ADDR_EXPR)
1174 {
1175 int invariant = is_gimple_min_invariant (*tp);
1176 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1177
1178 /* Handle the case where we substituted an INDIRECT_REF
1179 into the operand of the ADDR_EXPR. */
1180 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1181 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1182 else
1183 recompute_tree_invariant_for_addr_expr (*tp);
1184
1185 /* If this used to be invariant, but is not any longer,
1186 then regimplification is probably needed. */
1187 if (invariant && !is_gimple_min_invariant (*tp))
1188 id->regimplify = true;
1189
1190 *walk_subtrees = 0;
1191 }
1192 }
1193
1194 /* Keep iterating. */
1195 return NULL_TREE;
1196 }
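/* Illustrative note (not in the original file): the INDIRECT_REF and
   MEM_REF handling above is what folds away "*&" patterns created by
   inline substitution.  For example, with

     void callee (int *p) { *p = 1; }
     ...
     callee (&a);

   the parameter p is mapped to &a, so the copied body would contain
   "*(&a) = 1"; re-folding the dereference turns it back into "a = 1".  */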
1197
1198 /* Helper for remap_gimple_stmt. Given an EH region number for the
1199 source function, map that to the duplicate EH region number in
1200 the destination function. */
1201
1202 static int
1203 remap_eh_region_nr (int old_nr, copy_body_data *id)
1204 {
1205 eh_region old_r, new_r;
1206 void **slot;
1207
1208 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1209 slot = pointer_map_contains (id->eh_map, old_r);
1210 new_r = (eh_region) *slot;
1211
1212 return new_r->index;
1213 }
1214
1215 /* Similar, but operate on INTEGER_CSTs. */
1216
1217 static tree
1218 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1219 {
1220 int old_nr, new_nr;
1221
1222 old_nr = tree_low_cst (old_t_nr, 0);
1223 new_nr = remap_eh_region_nr (old_nr, id);
1224
1225 return build_int_cst (integer_type_node, new_nr);
1226 }
1227
1228 /* Helper for copy_bb. Remap statement STMT using the inlining
1229 information in ID. Return the new statement copy. */
1230
1231 static gimple
1232 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1233 {
1234 gimple copy = NULL;
1235 struct walk_stmt_info wi;
1236 bool skip_first = false;
1237
1238 /* Begin by recognizing trees that we'll completely rewrite for the
1239 inlining context. Our output for these trees is completely
  1240      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1241 into an edge). Further down, we'll handle trees that get
1242 duplicated and/or tweaked. */
1243
1244 /* When requested, GIMPLE_RETURNs should be transformed to just the
1245 contained GIMPLE_ASSIGN. The branch semantics of the return will
1246 be handled elsewhere by manipulating the CFG rather than the
1247 statement. */
1248 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1249 {
1250 tree retval = gimple_return_retval (stmt);
1251
1252 /* If we're returning something, just turn that into an
1253 assignment into the equivalent of the original RESULT_DECL.
1254 If RETVAL is just the result decl, the result decl has
1255 already been set (e.g. a recent "foo (&result_decl, ...)");
1256 just toss the entire GIMPLE_RETURN. */
1257 if (retval
1258 && (TREE_CODE (retval) != RESULT_DECL
1259 && (TREE_CODE (retval) != SSA_NAME
1260 || ! SSA_NAME_VAR (retval)
1261 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1262 {
1263 copy = gimple_build_assign (id->retvar, retval);
1264 /* id->retvar is already substituted. Skip it on later remapping. */
1265 skip_first = true;
1266 }
1267 else
1268 return gimple_build_nop ();
1269 }
1270 else if (gimple_has_substatements (stmt))
1271 {
1272 gimple_seq s1, s2;
1273
1274 /* When cloning bodies from the C++ front end, we will be handed bodies
1275 in High GIMPLE form. Handle here all the High GIMPLE statements that
1276 have embedded statements. */
1277 switch (gimple_code (stmt))
1278 {
1279 case GIMPLE_BIND:
1280 copy = copy_gimple_bind (stmt, id);
1281 break;
1282
1283 case GIMPLE_CATCH:
1284 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1285 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1286 break;
1287
1288 case GIMPLE_EH_FILTER:
1289 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1290 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1291 break;
1292
1293 case GIMPLE_TRY:
1294 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1295 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1296 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1297 break;
1298
1299 case GIMPLE_WITH_CLEANUP_EXPR:
1300 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1301 copy = gimple_build_wce (s1);
1302 break;
1303
1304 case GIMPLE_OMP_PARALLEL:
1305 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1306 copy = gimple_build_omp_parallel
1307 (s1,
1308 gimple_omp_parallel_clauses (stmt),
1309 gimple_omp_parallel_child_fn (stmt),
1310 gimple_omp_parallel_data_arg (stmt));
1311 break;
1312
1313 case GIMPLE_OMP_TASK:
1314 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1315 copy = gimple_build_omp_task
1316 (s1,
1317 gimple_omp_task_clauses (stmt),
1318 gimple_omp_task_child_fn (stmt),
1319 gimple_omp_task_data_arg (stmt),
1320 gimple_omp_task_copy_fn (stmt),
1321 gimple_omp_task_arg_size (stmt),
1322 gimple_omp_task_arg_align (stmt));
1323 break;
1324
1325 case GIMPLE_OMP_FOR:
1326 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1327 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1328 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1329 gimple_omp_for_clauses (stmt),
1330 gimple_omp_for_collapse (stmt), s2);
1331 {
1332 size_t i;
1333 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1334 {
1335 gimple_omp_for_set_index (copy, i,
1336 gimple_omp_for_index (stmt, i));
1337 gimple_omp_for_set_initial (copy, i,
1338 gimple_omp_for_initial (stmt, i));
1339 gimple_omp_for_set_final (copy, i,
1340 gimple_omp_for_final (stmt, i));
1341 gimple_omp_for_set_incr (copy, i,
1342 gimple_omp_for_incr (stmt, i));
1343 gimple_omp_for_set_cond (copy, i,
1344 gimple_omp_for_cond (stmt, i));
1345 }
1346 }
1347 break;
1348
1349 case GIMPLE_OMP_MASTER:
1350 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1351 copy = gimple_build_omp_master (s1);
1352 break;
1353
1354 case GIMPLE_OMP_TASKGROUP:
1355 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1356 copy = gimple_build_omp_taskgroup (s1);
1357 break;
1358
1359 case GIMPLE_OMP_ORDERED:
1360 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1361 copy = gimple_build_omp_ordered (s1);
1362 break;
1363
1364 case GIMPLE_OMP_SECTION:
1365 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1366 copy = gimple_build_omp_section (s1);
1367 break;
1368
1369 case GIMPLE_OMP_SECTIONS:
1370 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1371 copy = gimple_build_omp_sections
1372 (s1, gimple_omp_sections_clauses (stmt));
1373 break;
1374
1375 case GIMPLE_OMP_SINGLE:
1376 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1377 copy = gimple_build_omp_single
1378 (s1, gimple_omp_single_clauses (stmt));
1379 break;
1380
1381 case GIMPLE_OMP_TARGET:
1382 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1383 copy = gimple_build_omp_target
1384 (s1, gimple_omp_target_kind (stmt),
1385 gimple_omp_target_clauses (stmt));
1386 break;
1387
1388 case GIMPLE_OMP_TEAMS:
1389 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1390 copy = gimple_build_omp_teams
1391 (s1, gimple_omp_teams_clauses (stmt));
1392 break;
1393
1394 case GIMPLE_OMP_CRITICAL:
1395 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1396 copy
1397 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1398 break;
1399
1400 case GIMPLE_TRANSACTION:
1401 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1402 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1403 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1404 break;
1405
1406 default:
1407 gcc_unreachable ();
1408 }
1409 }
1410 else
1411 {
1412 if (gimple_assign_copy_p (stmt)
1413 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1414 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1415 {
1416 /* Here we handle statements that are not completely rewritten.
1417 First we detect some inlining-induced bogosities for
1418 discarding. */
1419
1420 /* Some assignments VAR = VAR; don't generate any rtl code
1421 and thus don't count as variable modification. Avoid
1422 keeping bogosities like 0 = 0. */
1423 tree decl = gimple_assign_lhs (stmt), value;
1424 tree *n;
1425
1426 n = (tree *) pointer_map_contains (id->decl_map, decl);
1427 if (n)
1428 {
1429 value = *n;
1430 STRIP_TYPE_NOPS (value);
1431 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1432 return gimple_build_nop ();
1433 }
1434 }
1435
1436 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1437 in a block that we aren't copying during tree_function_versioning,
1438 just drop the clobber stmt. */
1439 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1440 {
1441 tree lhs = gimple_assign_lhs (stmt);
1442 if (TREE_CODE (lhs) == MEM_REF
1443 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1444 {
1445 gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1446 if (gimple_bb (def_stmt)
1447 && !bitmap_bit_p (id->blocks_to_copy,
1448 gimple_bb (def_stmt)->index))
1449 return gimple_build_nop ();
1450 }
1451 }
1452
1453 if (gimple_debug_bind_p (stmt))
1454 {
1455 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1456 gimple_debug_bind_get_value (stmt),
1457 stmt);
1458 id->debug_stmts.safe_push (copy);
1459 return copy;
1460 }
1461 if (gimple_debug_source_bind_p (stmt))
1462 {
1463 copy = gimple_build_debug_source_bind
1464 (gimple_debug_source_bind_get_var (stmt),
1465 gimple_debug_source_bind_get_value (stmt), stmt);
1466 id->debug_stmts.safe_push (copy);
1467 return copy;
1468 }
1469
1470 /* Create a new deep copy of the statement. */
1471 copy = gimple_copy (stmt);
1472
1473 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1474 RESX and EH_DISPATCH. */
1475 if (id->eh_map)
1476 switch (gimple_code (copy))
1477 {
1478 case GIMPLE_CALL:
1479 {
1480 tree r, fndecl = gimple_call_fndecl (copy);
1481 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1482 switch (DECL_FUNCTION_CODE (fndecl))
1483 {
1484 case BUILT_IN_EH_COPY_VALUES:
1485 r = gimple_call_arg (copy, 1);
1486 r = remap_eh_region_tree_nr (r, id);
1487 gimple_call_set_arg (copy, 1, r);
1488 /* FALLTHRU */
1489
1490 case BUILT_IN_EH_POINTER:
1491 case BUILT_IN_EH_FILTER:
1492 r = gimple_call_arg (copy, 0);
1493 r = remap_eh_region_tree_nr (r, id);
1494 gimple_call_set_arg (copy, 0, r);
1495 break;
1496
1497 default:
1498 break;
1499 }
1500
1501 /* Reset alias info if we didn't apply measures to
1502 keep it valid over inlining by setting DECL_PT_UID. */
1503 if (!id->src_cfun->gimple_df
1504 || !id->src_cfun->gimple_df->ipa_pta)
1505 gimple_call_reset_alias_info (copy);
1506 }
1507 break;
1508
1509 case GIMPLE_RESX:
1510 {
1511 int r = gimple_resx_region (copy);
1512 r = remap_eh_region_nr (r, id);
1513 gimple_resx_set_region (copy, r);
1514 }
1515 break;
1516
1517 case GIMPLE_EH_DISPATCH:
1518 {
1519 int r = gimple_eh_dispatch_region (copy);
1520 r = remap_eh_region_nr (r, id);
1521 gimple_eh_dispatch_set_region (copy, r);
1522 }
1523 break;
1524
1525 default:
1526 break;
1527 }
1528 }
1529
1530 /* If STMT has a block defined, map it to the newly constructed
1531 block. */
1532 if (gimple_block (copy))
1533 {
1534 tree *n;
1535 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1536 gcc_assert (n);
1537 gimple_set_block (copy, *n);
1538 }
1539
1540 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1541 return copy;
1542
1543 /* Remap all the operands in COPY. */
1544 memset (&wi, 0, sizeof (wi));
1545 wi.info = id;
1546 if (skip_first)
1547 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1548 else
1549 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1550
1551 /* Clear the copied virtual operands. We are not remapping them here
1552 but are going to recreate them from scratch. */
1553 if (gimple_has_mem_ops (copy))
1554 {
1555 gimple_set_vdef (copy, NULL_TREE);
1556 gimple_set_vuse (copy, NULL_TREE);
1557 }
1558
1559 return copy;
1560 }
1561
1562
1563 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1564 later */
1565
1566 static basic_block
1567 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1568 gcov_type count_scale)
1569 {
1570 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1571 basic_block copy_basic_block;
1572 tree decl;
1573 gcov_type freq;
1574 basic_block prev;
1575
1576 /* Search for previous copied basic block. */
1577 prev = bb->prev_bb;
1578 while (!prev->aux)
1579 prev = prev->prev_bb;
1580
1581 /* create_basic_block() will append every new block to
1582 basic_block_info automatically. */
1583 copy_basic_block = create_basic_block (NULL, (void *) 0,
1584 (basic_block) prev->aux);
1585 copy_basic_block->count = apply_scale (bb->count, count_scale);
1586
  1587      /* We are going to rebuild frequencies from scratch.  These values
  1588         are of only minor importance for driving canonicalize_loop_headers.  */
1589 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1590
1591 /* We recompute frequencies after inlining, so this is quite safe. */
1592 if (freq > BB_FREQ_MAX)
1593 freq = BB_FREQ_MAX;
1594 copy_basic_block->frequency = freq;
1595
1596 copy_gsi = gsi_start_bb (copy_basic_block);
1597
1598 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1599 {
1600 gimple stmt = gsi_stmt (gsi);
1601 gimple orig_stmt = stmt;
1602
1603 id->regimplify = false;
1604 stmt = remap_gimple_stmt (stmt, id);
1605 if (gimple_nop_p (stmt))
1606 continue;
1607
1608 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1609 seq_gsi = copy_gsi;
1610
1611 /* With return slot optimization we can end up with
1612 non-gimple (foo *)&this->m, fix that here. */
1613 if (is_gimple_assign (stmt)
1614 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1615 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1616 {
1617 tree new_rhs;
1618 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1619 gimple_assign_rhs1 (stmt),
1620 true, NULL, false,
1621 GSI_CONTINUE_LINKING);
1622 gimple_assign_set_rhs1 (stmt, new_rhs);
1623 id->regimplify = false;
1624 }
1625
1626 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1627
1628 if (id->regimplify)
1629 gimple_regimplify_operands (stmt, &seq_gsi);
1630
1631 /* If copy_basic_block has been empty at the start of this iteration,
1632 call gsi_start_bb again to get at the newly added statements. */
1633 if (gsi_end_p (copy_gsi))
1634 copy_gsi = gsi_start_bb (copy_basic_block);
1635 else
1636 gsi_next (&copy_gsi);
1637
1638 /* Process the new statement. The call to gimple_regimplify_operands
1639 possibly turned the statement into multiple statements, we
1640 need to process all of them. */
1641 do
1642 {
1643 tree fn;
1644
1645 stmt = gsi_stmt (copy_gsi);
1646 if (is_gimple_call (stmt)
1647 && gimple_call_va_arg_pack_p (stmt)
1648 && id->gimple_call)
1649 {
1650 /* __builtin_va_arg_pack () should be replaced by
1651 all arguments corresponding to ... in the caller. */
1652 tree p;
1653 gimple new_call;
1654 vec<tree> argarray;
1655 size_t nargs = gimple_call_num_args (id->gimple_call);
1656 size_t n;
1657
1658 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1659 nargs--;
1660
1661 /* Create the new array of arguments. */
1662 n = nargs + gimple_call_num_args (stmt);
1663 argarray.create (n);
1664 argarray.safe_grow_cleared (n);
1665
1666 /* Copy all the arguments before '...' */
1667 memcpy (argarray.address (),
1668 gimple_call_arg_ptr (stmt, 0),
1669 gimple_call_num_args (stmt) * sizeof (tree));
1670
1671 /* Append the arguments passed in '...' */
1672 memcpy (argarray.address () + gimple_call_num_args (stmt),
1673 gimple_call_arg_ptr (id->gimple_call, 0)
1674 + (gimple_call_num_args (id->gimple_call) - nargs),
1675 nargs * sizeof (tree));
1676
1677 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1678 argarray);
1679
1680 argarray.release ();
1681
1682 /* Copy all GIMPLE_CALL flags, location and block, except
1683 GF_CALL_VA_ARG_PACK. */
1684 gimple_call_copy_flags (new_call, stmt);
1685 gimple_call_set_va_arg_pack (new_call, false);
1686 gimple_set_location (new_call, gimple_location (stmt));
1687 gimple_set_block (new_call, gimple_block (stmt));
1688 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1689
1690 gsi_replace (&copy_gsi, new_call, false);
1691 stmt = new_call;
1692 }
1693 else if (is_gimple_call (stmt)
1694 && id->gimple_call
1695 && (decl = gimple_call_fndecl (stmt))
1696 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1697 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1698 {
1699 /* __builtin_va_arg_pack_len () should be replaced by
1700 the number of anonymous arguments. */
1701 size_t nargs = gimple_call_num_args (id->gimple_call);
1702 tree count, p;
1703 gimple new_stmt;
1704
1705 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1706 nargs--;
1707
1708 count = build_int_cst (integer_type_node, nargs);
1709 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1710 gsi_replace (&copy_gsi, new_stmt, false);
1711 stmt = new_stmt;
1712 }
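          /* Illustrative note (not part of the original sources): given an
             always_inline varargs wrapper such as

               void err (const char *fmt, ...)
               { warn (fmt, __builtin_va_arg_pack ()); }

             and a caller containing err ("%d %s", i, s), the copied call
             becomes warn ("%d %s", i, s): the caller's arguments matching
             the "..." are spliced in place of the builtin, and
             __builtin_va_arg_pack_len () is replaced by their count (2).
             The names err and warn are made up for this example.  */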
1713
  1714          /* Statements produced by inlining can be unfolded, especially
  1715             when we have constant propagated some operands.  We can't fold
  1716             them right now for two reasons:
  1717             1) folding requires SSA_NAME_DEF_STMTs to be correct
  1718             2) we can't change function calls to builtins.
  1719             So we just mark the statement for later folding.  We mark
  1720             all new statements, instead of just statements that have changed
  1721             by some nontrivial substitution, so that even statements made
  1722             foldable indirectly are updated.  If this turns out to be
  1723             expensive, copy_body can be told to watch for nontrivial
  1724             changes.  */
1725 if (id->statements_to_fold)
1726 pointer_set_insert (id->statements_to_fold, stmt);
1727
1728 /* We're duplicating a CALL_EXPR. Find any corresponding
1729 callgraph edges and update or duplicate them. */
1730 if (is_gimple_call (stmt))
1731 {
1732 struct cgraph_edge *edge;
1733 int flags;
1734
1735 switch (id->transform_call_graph_edges)
1736 {
1737 case CB_CGE_DUPLICATE:
1738 edge = cgraph_edge (id->src_node, orig_stmt);
1739 if (edge)
1740 {
1741 int edge_freq = edge->frequency;
1742 int new_freq;
1743 struct cgraph_edge *old_edge = edge;
1744 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1745 gimple_uid (stmt),
1746 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1747 true);
1748 /* We could also just rescale the frequency, but
  1749                     doing so would introduce roundoff errors and make the
  1750                     verifier unhappy.  */
1751 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1752 copy_basic_block);
1753
1754 /* Speculative calls consist of two edges - direct and indirect.
1755 Duplicate the whole thing and distribute frequencies accordingly. */
1756 if (edge->speculative)
1757 {
1758 struct cgraph_edge *direct, *indirect;
1759 struct ipa_ref *ref;
1760
1761 gcc_assert (!edge->indirect_unknown_callee);
1762 cgraph_speculative_call_info (old_edge, direct, indirect, ref);
1763 indirect = cgraph_clone_edge (indirect, id->dst_node, stmt,
1764 gimple_uid (stmt),
1765 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1766 true);
1767 if (old_edge->frequency + indirect->frequency)
1768 {
1769 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1770 (old_edge->frequency + indirect->frequency)),
1771 CGRAPH_FREQ_MAX);
1772 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1773 (old_edge->frequency + indirect->frequency)),
1774 CGRAPH_FREQ_MAX);
1775 }
1776 ipa_clone_ref (ref, id->dst_node, stmt);
1777 }
1778 else
1779 {
1780 edge->frequency = new_freq;
1781 if (dump_file
1782 && profile_status_for_function (cfun) != PROFILE_ABSENT
1783 && (edge_freq > edge->frequency + 10
1784 || edge_freq < edge->frequency - 10))
1785 {
1786 fprintf (dump_file, "Edge frequency estimated by "
1787 "cgraph %i diverge from inliner's estimate %i\n",
1788 edge_freq,
1789 edge->frequency);
1790 fprintf (dump_file,
1791 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1792 bb->index,
1793 bb->frequency,
1794 copy_basic_block->frequency);
1795 }
1796 }
1797 }
1798 break;
1799
1800 case CB_CGE_MOVE_CLONES:
1801 cgraph_set_call_stmt_including_clones (id->dst_node,
1802 orig_stmt, stmt);
1803 edge = cgraph_edge (id->dst_node, stmt);
1804 break;
1805
1806 case CB_CGE_MOVE:
1807 edge = cgraph_edge (id->dst_node, orig_stmt);
1808 if (edge)
1809 cgraph_set_call_stmt (edge, stmt);
1810 break;
1811
1812 default:
1813 gcc_unreachable ();
1814 }
1815
1816 /* Constant propagation on arguments done during inlining
1817 may create new direct calls. Produce edges for them. */
1818 if ((!edge
1819 || (edge->indirect_inlining_edge
1820 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1821 && id->dst_node->definition
1822 && (fn = gimple_call_fndecl (stmt)) != NULL)
1823 {
1824 struct cgraph_node *dest = cgraph_get_node (fn);
1825
1826 /* We have a missing edge in the callgraph. This can happen
1827 when previous inlining turned an indirect call into a
1828 direct call by constant propagating arguments, or when we
1829 are producing a dead clone (for further cloning). In all
1830 other cases we hit a bug (incorrect node sharing is the
1831 most common reason for missing edges). */
1832 gcc_assert (!dest->definition
1833 || dest->address_taken
1834 || !id->src_node->definition
1835 || !id->dst_node->definition);
1836 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1837 cgraph_create_edge_including_clones
1838 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1839 compute_call_stmt_bb_frequency (id->dst_node->decl,
1840 copy_basic_block),
1841 CIF_ORIGINALLY_INDIRECT_CALL);
1842 else
1843 cgraph_create_edge (id->dst_node, dest, stmt,
1844 bb->count,
1845 compute_call_stmt_bb_frequency
1846 (id->dst_node->decl,
1847 copy_basic_block))->inline_failed
1848 = CIF_ORIGINALLY_INDIRECT_CALL;
1849 if (dump_file)
1850 {
1851 fprintf (dump_file, "Created new direct edge to %s\n",
1852 cgraph_node_name (dest));
1853 }
1854 }
1855
1856 flags = gimple_call_flags (stmt);
1857 if (flags & ECF_MAY_BE_ALLOCA)
1858 cfun->calls_alloca = true;
1859 if (flags & ECF_RETURNS_TWICE)
1860 cfun->calls_setjmp = true;
1861 }
1862
1863 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1864 id->eh_map, id->eh_lp_nr);
1865
1866 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1867 {
1868 ssa_op_iter i;
1869 tree def;
1870
1871 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1872 if (TREE_CODE (def) == SSA_NAME)
1873 SSA_NAME_DEF_STMT (def) = stmt;
1874 }
1875
1876 gsi_next (&copy_gsi);
1877 }
1878 while (!gsi_end_p (copy_gsi));
1879
1880 copy_gsi = gsi_last_bb (copy_basic_block);
1881 }
1882
1883 return copy_basic_block;
1884 }
1885
1886 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1887 form is quite easy, since the dominator relationship for the old basic blocks
1888 does not change.
1889
1890 There is however an exception where inlining might change the dominator
1891 relation across EH edges from basic blocks within the inlined function
1892 to landing pads in the function we inline into.
1893
1894 The function fills in the PHI_RESULTs of such PHI nodes if they refer
1895 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1896 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1897 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1898 set, which means that there will be no overlapping live ranges
1899 for the underlying symbol.
1900
1901 This might change in the future if we allow redirecting of EH edges; we
1902 might then want to change the way we build the CFG pre-inlining to include
1903 all the possible edges. */
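 /* As a hand-written sketch (not from a real dump): if a landing pad
    E->dest in the caller already has

      x_1 = PHI <x_2 (RET_BB)>

    and copying the inlined body adds a new EH edge E from a copied block BB,
    the loop below fills in the PHI argument for the new edge, giving

      x_1 = PHI <x_2 (RET_BB), x_2 (BB)>

    i.e. the argument on the new edge is seeded from the argument already
    flowing in on the edge from RET_BB.  */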
1904 static void
1905 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1906 bool can_throw, bool nonlocal_goto)
1907 {
1908 edge e;
1909 edge_iterator ei;
1910
1911 FOR_EACH_EDGE (e, ei, bb->succs)
1912 if (!e->dest->aux
1913 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1914 {
1915 gimple phi;
1916 gimple_stmt_iterator si;
1917
1918 if (!nonlocal_goto)
1919 gcc_assert (e->flags & EDGE_EH);
1920
1921 if (!can_throw)
1922 gcc_assert (!(e->flags & EDGE_EH));
1923
1924 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1925 {
1926 edge re;
1927
1928 phi = gsi_stmt (si);
1929
1930 /* For abnormal goto/call edges the receiver can be the
1931 ENTRY_BLOCK. Do not assert this cannot happen. */
1932
1933 gcc_assert ((e->flags & EDGE_EH)
1934 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1935
1936 re = find_edge (ret_bb, e->dest);
1937 gcc_checking_assert (re);
1938 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1939 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1940
1941 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1942 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1943 }
1944 }
1945 }
1946
1947
1948 /* Copy edges from BB into its copy constructed earlier, scaling the profile
1949 accordingly. EH edges will be taken care of later. Assume the aux
1950 pointers point to the copies of each BB. Return true if any
1951 debug stmts are left after a statement that must end the basic block. */
1952
1953 static bool
1954 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
1955 bool can_make_abnormal_goto)
1956 {
1957 basic_block new_bb = (basic_block) bb->aux;
1958 edge_iterator ei;
1959 edge old_edge;
1960 gimple_stmt_iterator si;
1961 int flags;
1962 bool need_debug_cleanup = false;
1963
1964 /* Use the indices from the original blocks to create edges for the
1965 new ones. */
1966 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1967 if (!(old_edge->flags & EDGE_EH))
1968 {
1969 edge new_edge;
1970
1971 flags = old_edge->flags;
1972
1973 /* Return edges do get a FALLTHRU flag when they get inlined. */
1974 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1975 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1976 flags |= EDGE_FALLTHRU;
1977 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1978 new_edge->count = apply_scale (old_edge->count, count_scale);
1979 new_edge->probability = old_edge->probability;
1980 }
1981
1982 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1983 return false;
1984
1985 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1986 {
1987 gimple copy_stmt;
1988 bool can_throw, nonlocal_goto;
1989
1990 copy_stmt = gsi_stmt (si);
1991 if (!is_gimple_debug (copy_stmt))
1992 update_stmt (copy_stmt);
1993
1994 /* Do this before the possible split_block. */
1995 gsi_next (&si);
1996
1997 /* If this tree could throw an exception, there are two
1998 cases where we need to add abnormal edge(s): the
1999 tree wasn't in a region and there is a "current
2000 region" in the caller; or the original tree had
2001 EH edges. In both cases split the block after the tree,
2002 and add abnormal edge(s) as needed; we need both
2003 those from the callee and the caller.
2004 We check whether the copy can throw, because the const
2005 propagation can change an INDIRECT_REF which throws
2006 into a COMPONENT_REF which doesn't. If the copy
2007 can throw, the original could also throw. */
2008 can_throw = stmt_can_throw_internal (copy_stmt);
2009 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
2010
2011 if (can_throw || nonlocal_goto)
2012 {
2013 if (!gsi_end_p (si))
2014 {
2015 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2016 gsi_next (&si);
2017 if (gsi_end_p (si))
2018 need_debug_cleanup = true;
2019 }
2020 if (!gsi_end_p (si))
2021 /* Note that bb's predecessor edges aren't necessarily
2022 right at this point; split_block doesn't care. */
2023 {
2024 edge e = split_block (new_bb, copy_stmt);
2025
2026 new_bb = e->dest;
2027 new_bb->aux = e->src->aux;
2028 si = gsi_start_bb (new_bb);
2029 }
2030 }
2031
2032 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2033 make_eh_dispatch_edges (copy_stmt);
2034 else if (can_throw)
2035 make_eh_edges (copy_stmt);
2036
2037 /* If the call we inline cannot make an abnormal goto, do not add
2038 additional abnormal edges but only retain those already present
2039 in the original function body. */
2040 nonlocal_goto &= can_make_abnormal_goto;
2041 if (nonlocal_goto)
2042 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
2043
2044 if ((can_throw || nonlocal_goto)
2045 && gimple_in_ssa_p (cfun))
2046 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2047 can_throw, nonlocal_goto);
2048 }
2049 return need_debug_cleanup;
2050 }
2051
2052 /* Copy the PHIs. All blocks and edges are copied, some blocks
2053 were possibly split and new outgoing EH edges inserted.
2054 BB points to the block of the original function and AUX pointers link
2055 the original and newly copied blocks. */
2056
2057 static void
2058 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2059 {
2060 basic_block const new_bb = (basic_block) bb->aux;
2061 edge_iterator ei;
2062 gimple phi;
2063 gimple_stmt_iterator si;
2064 edge new_edge;
2065 bool inserted = false;
2066
2067 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2068 {
2069 tree res, new_res;
2070 gimple new_phi;
2071
2072 phi = gsi_stmt (si);
2073 res = PHI_RESULT (phi);
2074 new_res = res;
2075 if (!virtual_operand_p (res))
2076 {
2077 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2078 new_phi = create_phi_node (new_res, new_bb);
2079 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2080 {
2081 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2082 tree arg;
2083 tree new_arg;
2084 edge_iterator ei2;
2085 location_t locus;
2086
2087 /* When doing partial cloning, we allow PHIs on the entry block
2088 as long as all the arguments are the same. Find any input
2089 edge to see the argument to copy. */
2090 if (!old_edge)
2091 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2092 if (!old_edge->src->aux)
2093 break;
2094
2095 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2096 new_arg = arg;
2097 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2098 gcc_assert (new_arg);
2099 /* With return slot optimization we can end up with
2100 non-gimple (foo *)&this->m, fix that here. */
2101 if (TREE_CODE (new_arg) != SSA_NAME
2102 && TREE_CODE (new_arg) != FUNCTION_DECL
2103 && !is_gimple_val (new_arg))
2104 {
2105 gimple_seq stmts = NULL;
2106 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2107 gsi_insert_seq_on_edge (new_edge, stmts);
2108 inserted = true;
2109 }
2110 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2111 if (LOCATION_BLOCK (locus))
2112 {
2113 tree *n;
2114 n = (tree *) pointer_map_contains (id->decl_map,
2115 LOCATION_BLOCK (locus));
2116 gcc_assert (n);
2117 if (*n)
2118 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2119 else
2120 locus = LOCATION_LOCUS (locus);
2121 }
2122 else
2123 locus = LOCATION_LOCUS (locus);
2124
2125 add_phi_arg (new_phi, new_arg, new_edge, locus);
2126 }
2127 }
2128 }
2129
2130 /* Commit the delayed edge insertions. */
2131 if (inserted)
2132 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2133 gsi_commit_one_edge_insert (new_edge, NULL);
2134 }
2135
2136
2137 /* Wrapper for remap_decl so it can be used as a callback. */
2138
2139 static tree
2140 remap_decl_1 (tree decl, void *data)
2141 {
2142 return remap_decl (decl, (copy_body_data *) data);
2143 }
2144
2145 /* Build struct function and associated datastructures for the new clone
2146 NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function changes
2147 cfun to the function of new_fndecl (and current_function_decl too). */
2148
2149 static void
2150 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2151 {
2152 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2153 gcov_type count_scale;
2154
2155 if (!DECL_ARGUMENTS (new_fndecl))
2156 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2157 if (!DECL_RESULT (new_fndecl))
2158 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2159
2160 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2161 count_scale
2162 = GCOV_COMPUTE_SCALE (count,
2163 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2164 else
2165 count_scale = REG_BR_PROB_BASE;
2166
2167 /* Register specific tree functions. */
2168 gimple_register_cfg_hooks ();
2169
2170 /* Get clean struct function. */
2171 push_struct_function (new_fndecl);
2172
2173 /* We will rebuild these, so just sanity check that they are empty. */
2174 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2175 gcc_assert (cfun->local_decls == NULL);
2176 gcc_assert (cfun->cfg == NULL);
2177 gcc_assert (cfun->decl == new_fndecl);
2178
2179 /* Copy items we preserve during cloning. */
2180 cfun->static_chain_decl = src_cfun->static_chain_decl;
2181 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2182 cfun->function_end_locus = src_cfun->function_end_locus;
2183 cfun->curr_properties = src_cfun->curr_properties;
2184 cfun->last_verified = src_cfun->last_verified;
2185 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2186 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2187 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2188 cfun->stdarg = src_cfun->stdarg;
2189 cfun->after_inlining = src_cfun->after_inlining;
2190 cfun->can_throw_non_call_exceptions
2191 = src_cfun->can_throw_non_call_exceptions;
2192 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2193 cfun->returns_struct = src_cfun->returns_struct;
2194 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2195
2196 init_empty_tree_cfg ();
2197
2198 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2199 ENTRY_BLOCK_PTR->count =
2200 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2201 REG_BR_PROB_BASE);
2202 ENTRY_BLOCK_PTR->frequency
2203 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2204 EXIT_BLOCK_PTR->count =
2205 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2206 REG_BR_PROB_BASE);
2207 EXIT_BLOCK_PTR->frequency =
2208 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2209 if (src_cfun->eh)
2210 init_eh_for_function ();
2211
2212 if (src_cfun->gimple_df)
2213 {
2214 init_tree_ssa (cfun);
2215 cfun->gimple_df->in_ssa_p = true;
2216 init_ssa_operands (cfun);
2217 }
2218 }
2219
2220 /* Helper function for copy_cfg_body. Move debug stmts from the end
2221 of NEW_BB to the beginning of successor basic blocks when needed. If the
2222 successor has multiple predecessors, reset them, otherwise keep
2223 their value. */
2224
2225 static void
2226 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2227 {
2228 edge e;
2229 edge_iterator ei;
2230 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2231
2232 if (gsi_end_p (si)
2233 || gsi_one_before_end_p (si)
2234 || !(stmt_can_throw_internal (gsi_stmt (si))
2235 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2236 return;
2237
2238 FOR_EACH_EDGE (e, ei, new_bb->succs)
2239 {
2240 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2241 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2242 while (is_gimple_debug (gsi_stmt (ssi)))
2243 {
2244 gimple stmt = gsi_stmt (ssi), new_stmt;
2245 tree var;
2246 tree value;
2247
2248 /* For the last edge move the debug stmts instead of copying
2249 them. */
2250 if (ei_one_before_end_p (ei))
2251 {
2252 si = ssi;
2253 gsi_prev (&ssi);
2254 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2255 gimple_debug_bind_reset_value (stmt);
2256 gsi_remove (&si, false);
2257 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2258 continue;
2259 }
2260
2261 if (gimple_debug_bind_p (stmt))
2262 {
2263 var = gimple_debug_bind_get_var (stmt);
2264 if (single_pred_p (e->dest))
2265 {
2266 value = gimple_debug_bind_get_value (stmt);
2267 value = unshare_expr (value);
2268 }
2269 else
2270 value = NULL_TREE;
2271 new_stmt = gimple_build_debug_bind (var, value, stmt);
2272 }
2273 else if (gimple_debug_source_bind_p (stmt))
2274 {
2275 var = gimple_debug_source_bind_get_var (stmt);
2276 value = gimple_debug_source_bind_get_value (stmt);
2277 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2278 }
2279 else
2280 gcc_unreachable ();
2281 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2282 id->debug_stmts.safe_push (new_stmt);
2283 gsi_prev (&ssi);
2284 }
2285 }
2286 }
2287
2288 /* Make a copy of the sub-loops of SRC_PARENT and place them
2289 as children of DEST_PARENT. */
2290
2291 static void
2292 copy_loops (copy_body_data *id,
2293 struct loop *dest_parent, struct loop *src_parent)
2294 {
2295 struct loop *src_loop = src_parent->inner;
2296 while (src_loop)
2297 {
2298 if (!id->blocks_to_copy
2299 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2300 {
2301 struct loop *dest_loop = alloc_loop ();
2302
2303 /* Assign the new loop its header and latch and associate
2304 those with the new loop. */
2305 if (src_loop->header != NULL)
2306 {
2307 dest_loop->header = (basic_block)src_loop->header->aux;
2308 dest_loop->header->loop_father = dest_loop;
2309 }
2310 if (src_loop->latch != NULL)
2311 {
2312 dest_loop->latch = (basic_block)src_loop->latch->aux;
2313 dest_loop->latch->loop_father = dest_loop;
2314 }
2315
2316 /* Copy loop meta-data. */
2317 copy_loop_info (src_loop, dest_loop);
2318
2319 /* Finally place it into the loop array and the loop tree. */
2320 place_new_loop (cfun, dest_loop);
2321 flow_loop_tree_node_add (dest_parent, dest_loop);
2322
2323 if (src_loop->simduid)
2324 {
2325 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2326 cfun->has_simduid_loops = true;
2327 }
2328 if (src_loop->force_vect)
2329 {
2330 dest_loop->force_vect = true;
2331 cfun->has_force_vect_loops = true;
2332 }
2333
2334 /* Recurse. */
2335 copy_loops (id, dest_loop, src_loop);
2336 }
2337 src_loop = src_loop->next;
2338 }
2339 }
2340
2341 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2342
2343 void
2344 redirect_all_calls (copy_body_data * id, basic_block bb)
2345 {
2346 gimple_stmt_iterator si;
2347 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2348 {
2349 if (is_gimple_call (gsi_stmt (si)))
2350 {
2351 struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
2352 if (edge)
2353 cgraph_redirect_edge_call_stmt_to_callee (edge);
2354 }
2355 }
2356 }
2357
2358 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2359 with each bb's frequency. Used when NODE has a 0-weight entry
2360 but we are about to inline it into a non-zero count call bb.
2361 See the comments for handle_missing_profiles() in predict.c for
2362 when this can happen for COMDATs. */
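 /* A small worked example (figures invented for illustration): with
    BB_FREQ_MAX == 10000 and REG_BR_PROB_BASE == 10000, a block whose
    estimated frequency is 5000 in a callee given COUNT == 200 ends up
    with bb->count == 200 * 5000 / 10000 == 100, and each outgoing edge
    then gets that count scaled by its probability.  */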
2363
2364 void
2365 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2366 {
2367 basic_block bb;
2368 edge_iterator ei;
2369 edge e;
2370 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2371
2372 FOR_ALL_BB_FN(bb, fn)
2373 {
2374 bb->count = apply_scale (count,
2375 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2376 FOR_EACH_EDGE (e, ei, bb->succs)
2377 e->count = apply_probability (e->src->count, e->probability);
2378 }
2379 }
2380
2381 /* Make a copy of the body of FN so that it can be inserted inline in
2382 another function. Walks FN via CFG, returns new fndecl. */
2383
2384 static tree
2385 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2386 basic_block entry_block_map, basic_block exit_block_map,
2387 basic_block new_entry)
2388 {
2389 tree callee_fndecl = id->src_fn;
2390 /* Original cfun for the callee, doesn't change. */
2391 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2392 struct function *cfun_to_copy;
2393 basic_block bb;
2394 tree new_fndecl = NULL;
2395 bool need_debug_cleanup = false;
2396 gcov_type count_scale;
2397 int last;
2398 int incoming_frequency = 0;
2399 gcov_type incoming_count = 0;
2400
2401 /* This can happen for COMDAT routines that end up with 0 counts
2402 despite being called (see the comments for handle_missing_profiles()
2403 in predict.c as to why). Apply counts to the blocks in the callee
2404 before inlining, using the guessed edge frequencies, so that we don't
2405 end up with a 0-count inline body which can confuse downstream
2406 optimizations such as function splitting. */
2407 if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count)
2408 {
2409 /* Apply the larger of the call bb count and the total incoming
2410 call edge count to the callee. */
2411 gcov_type in_count = 0;
2412 struct cgraph_edge *in_edge;
2413 for (in_edge = id->src_node->callers; in_edge;
2414 in_edge = in_edge->next_caller)
2415 in_count += in_edge->count;
2416 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2417 }
2418
2419 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2420 count_scale
2421 = GCOV_COMPUTE_SCALE (count,
2422 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2423 else
2424 count_scale = REG_BR_PROB_BASE;
2425
2426 /* Register specific tree functions. */
2427 gimple_register_cfg_hooks ();
2428
2429 /* If we are inlining just a region of the function, make sure to connect the new
2430 entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we must
2431 compute the frequency and probability of ENTRY_BLOCK_PTR based on the frequencies
2432 and probabilities of the edges incoming from the nonduplicated region. */
2433 if (new_entry)
2434 {
2435 edge e;
2436 edge_iterator ei;
2437
2438 FOR_EACH_EDGE (e, ei, new_entry->preds)
2439 if (!e->src->aux)
2440 {
2441 incoming_frequency += EDGE_FREQUENCY (e);
2442 incoming_count += e->count;
2443 }
2444 incoming_count = apply_scale (incoming_count, count_scale);
2445 incoming_frequency
2446 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2447 ENTRY_BLOCK_PTR->count = incoming_count;
2448 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2449 }
2450
2451 /* Must have a CFG here at this point. */
2452 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2453 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2454
2455 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2456
2457 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2458 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2459 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2460 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2461
2462 /* Duplicate any exception-handling regions. */
2463 if (cfun->eh)
2464 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2465 remap_decl_1, id);
2466
2467 /* Use aux pointers to map the original blocks to copy. */
2468 FOR_EACH_BB_FN (bb, cfun_to_copy)
2469 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2470 {
2471 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2472 bb->aux = new_bb;
2473 new_bb->aux = bb;
2474 new_bb->loop_father = entry_block_map->loop_father;
2475 }
2476
2477 last = last_basic_block;
2478
2479 /* Now that we've duplicated the blocks, duplicate their edges. */
2480 bool can_make_abnormal_goto
2481 = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
2482 FOR_ALL_BB_FN (bb, cfun_to_copy)
2483 if (!id->blocks_to_copy
2484 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2485 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2486 can_make_abnormal_goto);
2487
2488 if (new_entry)
2489 {
2490 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2491 e->probability = REG_BR_PROB_BASE;
2492 e->count = incoming_count;
2493 }
2494
2495 /* Duplicate the loop tree, if available and wanted. */
2496 if (loops_for_fn (src_cfun) != NULL
2497 && current_loops != NULL)
2498 {
2499 copy_loops (id, entry_block_map->loop_father,
2500 get_loop (src_cfun, 0));
2501 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2502 loops_state_set (LOOPS_NEED_FIXUP);
2503 }
2504
2505 /* If the loop tree in the source function needed fixup, mark the
2506 destination loop tree for fixup, too. */
2507 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2508 loops_state_set (LOOPS_NEED_FIXUP);
2509
2510 if (gimple_in_ssa_p (cfun))
2511 FOR_ALL_BB_FN (bb, cfun_to_copy)
2512 if (!id->blocks_to_copy
2513 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2514 copy_phis_for_bb (bb, id);
2515
2516 FOR_ALL_BB_FN (bb, cfun_to_copy)
2517 if (bb->aux)
2518 {
2519 if (need_debug_cleanup
2520 && bb->index != ENTRY_BLOCK
2521 && bb->index != EXIT_BLOCK)
2522 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2523 /* Update call edge destinations. This cannot be done before loop
2524 info is updated, because we may split basic blocks. */
2525 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2526 redirect_all_calls (id, (basic_block)bb->aux);
2527 ((basic_block)bb->aux)->aux = NULL;
2528 bb->aux = NULL;
2529 }
2530
2531 /* Zero out AUX fields of newly created blocks during EH edge
2532 insertion. */
2533 for (; last < last_basic_block; last++)
2534 {
2535 if (need_debug_cleanup)
2536 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2537 BASIC_BLOCK (last)->aux = NULL;
2538 /* Update call edge destinations. This cannot be done before loop
2539 info is updated, because we may split basic blocks. */
2540 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2541 redirect_all_calls (id, BASIC_BLOCK (last));
2542 }
2543 entry_block_map->aux = NULL;
2544 exit_block_map->aux = NULL;
2545
2546 if (id->eh_map)
2547 {
2548 pointer_map_destroy (id->eh_map);
2549 id->eh_map = NULL;
2550 }
2551
2552 return new_fndecl;
2553 }
2554
2555 /* Copy the debug STMT using ID. We deal with these statements in a
2556 special way: if any variable in their VALUE expression wasn't
2557 remapped yet, we won't remap it, because that would get decl uids
2558 out of sync, causing codegen differences between -g and -g0. If
2559 this arises, we drop the VALUE expression altogether. */
2560
2561 static void
2562 copy_debug_stmt (gimple stmt, copy_body_data *id)
2563 {
2564 tree t, *n;
2565 struct walk_stmt_info wi;
2566
2567 if (gimple_block (stmt))
2568 {
2569 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2570 gimple_set_block (stmt, n ? *n : id->block);
2571 }
2572
2573 /* Remap all the operands in COPY. */
2574 memset (&wi, 0, sizeof (wi));
2575 wi.info = id;
2576
2577 processing_debug_stmt = 1;
2578
2579 if (gimple_debug_source_bind_p (stmt))
2580 t = gimple_debug_source_bind_get_var (stmt);
2581 else
2582 t = gimple_debug_bind_get_var (stmt);
2583
2584 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2585 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2586 {
2587 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2588 t = *n;
2589 }
2590 else if (TREE_CODE (t) == VAR_DECL
2591 && !is_global_var (t)
2592 && !pointer_map_contains (id->decl_map, t))
2593 /* T is a non-localized variable. */;
2594 else
2595 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2596
2597 if (gimple_debug_bind_p (stmt))
2598 {
2599 gimple_debug_bind_set_var (stmt, t);
2600
2601 if (gimple_debug_bind_has_value_p (stmt))
2602 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2603 remap_gimple_op_r, &wi, NULL);
2604
2605 /* Punt if any decl couldn't be remapped. */
2606 if (processing_debug_stmt < 0)
2607 gimple_debug_bind_reset_value (stmt);
2608 }
2609 else if (gimple_debug_source_bind_p (stmt))
2610 {
2611 gimple_debug_source_bind_set_var (stmt, t);
2612 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2613 remap_gimple_op_r, &wi, NULL);
2614 /* When inlining and the source bind refers to one of the optimized
2615 away parameters, change the source bind into a normal debug bind
2616 referring to the corresponding DEBUG_EXPR_DECL that should have
2617 been bound before the call stmt. */
2618 t = gimple_debug_source_bind_get_value (stmt);
2619 if (t != NULL_TREE
2620 && TREE_CODE (t) == PARM_DECL
2621 && id->gimple_call)
2622 {
2623 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2624 unsigned int i;
2625 if (debug_args != NULL)
2626 {
2627 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2628 if ((**debug_args)[i] == DECL_ORIGIN (t)
2629 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2630 {
2631 t = (**debug_args)[i + 1];
2632 stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
2633 gimple_debug_bind_set_value (stmt, t);
2634 break;
2635 }
2636 }
2637 }
2638 }
2639
2640 processing_debug_stmt = 0;
2641
2642 update_stmt (stmt);
2643 }
2644
2645 /* Process deferred debug stmts. In order to give values better odds
2646 of being successfully remapped, we delay the processing of debug
2647 stmts until all other stmts that might require remapping are
2648 processed. */
2649
2650 static void
2651 copy_debug_stmts (copy_body_data *id)
2652 {
2653 size_t i;
2654 gimple stmt;
2655
2656 if (!id->debug_stmts.exists ())
2657 return;
2658
2659 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2660 copy_debug_stmt (stmt, id);
2661
2662 id->debug_stmts.release ();
2663 }
2664
2665 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2666 another function. */
2667
2668 static tree
2669 copy_tree_body (copy_body_data *id)
2670 {
2671 tree fndecl = id->src_fn;
2672 tree body = DECL_SAVED_TREE (fndecl);
2673
2674 walk_tree (&body, copy_tree_body_r, id, NULL);
2675
2676 return body;
2677 }
2678
2679 /* Make a copy of the body of FN so that it can be inserted inline in
2680 another function. */
2681
2682 static tree
2683 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2684 basic_block entry_block_map, basic_block exit_block_map,
2685 basic_block new_entry)
2686 {
2687 tree fndecl = id->src_fn;
2688 tree body;
2689
2690 /* If this body has a CFG, walk CFG and copy. */
2691 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2692 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2693 new_entry);
2694 copy_debug_stmts (id);
2695
2696 return body;
2697 }
2698
2699 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2700 defined in function FN, or of a data member thereof. */
2701
2702 static bool
2703 self_inlining_addr_expr (tree value, tree fn)
2704 {
2705 tree var;
2706
2707 if (TREE_CODE (value) != ADDR_EXPR)
2708 return false;
2709
2710 var = get_base_address (TREE_OPERAND (value, 0));
2711
2712 return var && auto_var_in_fn_p (var, fn);
2713 }
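 /* For example (illustrative): when f is inlined into itself (direct or
    mutual recursion), an argument such as &buf, where buf is an automatic
    variable of f, must not be propagated as if it were invariant, because
    the caller's and the callee's instances of buf are distinct objects.
    This predicate lets setup_one_parameter detect that case.  */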
2714
2715 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2716 lexical block and line number information from base_stmt, if given,
2717 or from the last stmt of the block otherwise. */
2718
2719 static gimple
2720 insert_init_debug_bind (copy_body_data *id,
2721 basic_block bb, tree var, tree value,
2722 gimple base_stmt)
2723 {
2724 gimple note;
2725 gimple_stmt_iterator gsi;
2726 tree tracked_var;
2727
2728 if (!gimple_in_ssa_p (id->src_cfun))
2729 return NULL;
2730
2731 if (!MAY_HAVE_DEBUG_STMTS)
2732 return NULL;
2733
2734 tracked_var = target_for_debug_bind (var);
2735 if (!tracked_var)
2736 return NULL;
2737
2738 if (bb)
2739 {
2740 gsi = gsi_last_bb (bb);
2741 if (!base_stmt && !gsi_end_p (gsi))
2742 base_stmt = gsi_stmt (gsi);
2743 }
2744
2745 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2746
2747 if (bb)
2748 {
2749 if (!gsi_end_p (gsi))
2750 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2751 else
2752 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2753 }
2754
2755 return note;
2756 }
2757
2758 static void
2759 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2760 {
2761 /* If VAR represents a zero-sized variable, it's possible that the
2762 assignment statement may result in no gimple statements. */
2763 if (init_stmt)
2764 {
2765 gimple_stmt_iterator si = gsi_last_bb (bb);
2766
2767 /* We can end up with init statements that store to a non-register
2768 from a rhs with a conversion. Handle that here by forcing the
2769 rhs into a temporary. gimple_regimplify_operands is not
2770 prepared to do this for us. */
2771 if (!is_gimple_debug (init_stmt)
2772 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2773 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2774 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2775 {
2776 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2777 gimple_expr_type (init_stmt),
2778 gimple_assign_rhs1 (init_stmt));
2779 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2780 GSI_NEW_STMT);
2781 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2782 gimple_assign_set_rhs1 (init_stmt, rhs);
2783 }
2784 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2785 gimple_regimplify_operands (init_stmt, &si);
2786
2787 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2788 {
2789 tree def = gimple_assign_lhs (init_stmt);
2790 insert_init_debug_bind (id, bb, def, def, init_stmt);
2791 }
2792 }
2793 }
2794
2795 /* Initialize parameter P with VALUE. If needed, produce the init statement
2796 at the end of BB. When BB is NULL, we return the init statement to be
2797 output later. */
2798 static gimple
2799 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2800 basic_block bb, tree *vars)
2801 {
2802 gimple init_stmt = NULL;
2803 tree var;
2804 tree rhs = value;
2805 tree def = (gimple_in_ssa_p (cfun)
2806 ? ssa_default_def (id->src_cfun, p) : NULL);
2807
2808 if (value
2809 && value != error_mark_node
2810 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2811 {
2812 /* If we can match up types by promotion/demotion do so. */
2813 if (fold_convertible_p (TREE_TYPE (p), value))
2814 rhs = fold_convert (TREE_TYPE (p), value);
2815 else
2816 {
2817 /* ??? For valid programs we should not end up here.
2818 Still if we end up with truly mismatched types here, fall back
2819 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2820 GIMPLE to the following passes. */
2821 if (!is_gimple_reg_type (TREE_TYPE (value))
2822 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2823 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2824 else
2825 rhs = build_zero_cst (TREE_TYPE (p));
2826 }
2827 }
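 /* Sketch of the cases above (illustrative, not exhaustive): an int value
    passed for a long parameter is handled by fold_convert; a scalar of a
    different but same-sized type falls back to VIEW_CONVERT_EXPR; and a
    mismatched gimple register value whose size differs from the parameter
    is replaced by a zero constant rather than emitting invalid GIMPLE.  */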
2828
2829 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2830 here since the type of this decl must be visible to the calling
2831 function. */
2832 var = copy_decl_to_var (p, id);
2833
2834 /* Declare this new variable. */
2835 DECL_CHAIN (var) = *vars;
2836 *vars = var;
2837
2838 /* Make gimplifier happy about this variable. */
2839 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2840
2841 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2842 we would not need to create a new variable here at all, if it
2843 weren't for debug info. Still, we can just use the argument
2844 value. */
2845 if (TREE_READONLY (p)
2846 && !TREE_ADDRESSABLE (p)
2847 && value && !TREE_SIDE_EFFECTS (value)
2848 && !def)
2849 {
2850 /* We may produce non-gimple trees by adding NOPs or introduce
2851 invalid sharing when the operand is not really constant.
2852 It is not a big deal to prohibit constant propagation here as
2853 we will constant propagate in the DOM1 pass anyway. */
2854 if (is_gimple_min_invariant (value)
2855 && useless_type_conversion_p (TREE_TYPE (p),
2856 TREE_TYPE (value))
2857 /* We have to be very careful about ADDR_EXPR. Make sure
2858 the base variable isn't a local variable of the inlined
2859 function, e.g., when doing recursive inlining, direct or
2860 mutually-recursive or whatever, which is why we don't
2861 just test whether fn == current_function_decl. */
2862 && ! self_inlining_addr_expr (value, fn))
2863 {
2864 insert_decl_map (id, p, value);
2865 insert_debug_decl_map (id, p, var);
2866 return insert_init_debug_bind (id, bb, var, value, NULL);
2867 }
2868 }
2869
2870 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2871 that way, when the PARM_DECL is encountered, it will be
2872 automatically replaced by the VAR_DECL. */
2873 insert_decl_map (id, p, var);
2874
2875 /* Even if P was TREE_READONLY, the new VAR should not be.
2876 In the original code, we would have constructed a
2877 temporary, and then the function body would have never
2878 changed the value of P. However, now, we will be
2879 constructing VAR directly. The constructor body may
2880 change its value multiple times as it is being
2881 constructed. Therefore, it must not be TREE_READONLY;
2882 the back-end assumes that TREE_READONLY variable is
2883 assigned to only once. */
2884 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2885 TREE_READONLY (var) = 0;
2886
2887 /* If there is no setup required and we are in SSA, take the easy route
2888 replacing all SSA names representing the function parameter by the
2889 SSA name passed to the function.
2890
2891 We need to construct a map for the variable anyway as it might be used
2892 in different SSA names when the parameter is set in the function.
2893
2894 Do the replacement at -O0 for const arguments replaced by constants.
2895 This is important for builtin_constant_p and other constructs requiring
2896 a constant argument to be visible in the inlined function body. */
2897 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2898 && (optimize
2899 || (TREE_READONLY (p)
2900 && is_gimple_min_invariant (rhs)))
2901 && (TREE_CODE (rhs) == SSA_NAME
2902 || is_gimple_min_invariant (rhs))
2903 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2904 {
2905 insert_decl_map (id, def, rhs);
2906 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2907 }
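 /* E.g. (illustrative SSA names): if the caller passes a_5 for parameter p
    whose default definition in the callee is p_1(D), the map records
    p_1(D) -> a_5, so every use of p_1(D) in the copied body becomes a use
    of a_5 and no initialization statement needs to be emitted.  */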
2908
2909 /* If the value of the argument is never used, don't bother initializing
2910 it. */
2911 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2912 {
2913 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2914 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2915 }
2916
2917 /* Initialize this VAR_DECL from the equivalent argument. Convert
2918 the argument to the proper type in case it was promoted. */
2919 if (value)
2920 {
2921 if (rhs == error_mark_node)
2922 {
2923 insert_decl_map (id, p, var);
2924 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2925 }
2926
2927 STRIP_USELESS_TYPE_CONVERSION (rhs);
2928
2929 /* If we are in SSA form, properly remap the default definition,
2930 or assign to a dummy SSA name if the parameter is unused and
2931 we are not optimizing. */
2932 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2933 {
2934 if (def)
2935 {
2936 def = remap_ssa_name (def, id);
2937 init_stmt = gimple_build_assign (def, rhs);
2938 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2939 set_ssa_default_def (cfun, var, NULL);
2940 }
2941 else if (!optimize)
2942 {
2943 def = make_ssa_name (var, NULL);
2944 init_stmt = gimple_build_assign (def, rhs);
2945 }
2946 }
2947 else
2948 init_stmt = gimple_build_assign (var, rhs);
2949
2950 if (bb && init_stmt)
2951 insert_init_stmt (id, bb, init_stmt);
2952 }
2953 return init_stmt;
2954 }
2955
2956 /* Generate code to initialize the parameters of the function at the
2957 top of the stack in ID from the GIMPLE_CALL STMT. */
2958
2959 static void
2960 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2961 tree fn, basic_block bb)
2962 {
2963 tree parms;
2964 size_t i;
2965 tree p;
2966 tree vars = NULL_TREE;
2967 tree static_chain = gimple_call_chain (stmt);
2968
2969 /* Figure out what the parameters are. */
2970 parms = DECL_ARGUMENTS (fn);
2971
2972 /* Loop through the parameter declarations, replacing each with an
2973 equivalent VAR_DECL, appropriately initialized. */
2974 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2975 {
2976 tree val;
2977 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2978 setup_one_parameter (id, p, val, fn, bb, &vars);
2979 }
2980 /* After remapping parameters remap their types. This has to be done
2981 in a second loop over all parameters to appropriately remap
2982 variable sized arrays when the size is specified in a
2983 parameter following the array. */
2984 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2985 {
2986 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2987 if (varp
2988 && TREE_CODE (*varp) == VAR_DECL)
2989 {
2990 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2991 ? ssa_default_def (id->src_cfun, p) : NULL);
2992 tree var = *varp;
2993 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2994 /* Also remap the default definition if it was remapped
2995 to the default definition of the parameter replacement
2996 by the parameter setup. */
2997 if (def)
2998 {
2999 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
3000 if (defp
3001 && TREE_CODE (*defp) == SSA_NAME
3002 && SSA_NAME_VAR (*defp) == var)
3003 TREE_TYPE (*defp) = TREE_TYPE (var);
3004 }
3005 }
3006 }
3007
3008 /* Initialize the static chain. */
3009 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3010 gcc_assert (fn != current_function_decl);
3011 if (p)
3012 {
3013 /* No static chain? Seems like a bug in tree-nested.c. */
3014 gcc_assert (static_chain);
3015
3016 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3017 }
3018
3019 declare_inline_vars (id->block, vars);
3020 }
3021
3022
3023 /* Declare a return variable to replace the RESULT_DECL for the
3024 function we are calling. An appropriate DECL_STMT is returned.
3025 The USE_STMT is filled to contain a use of the declaration to
3026 indicate the return value of the function.
3027
3028 RETURN_SLOT, if non-null, is the place where to store the result. It
3029 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3030 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3031
3032 The return value is a (possibly null) value that holds the result
3033 as seen by the caller. */
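 /* A sketch of the two main cases (illustrative): for "s = f ();" where the
    front end applied the return slot optimization, RETURN_SLOT is "s" and
    the callee's RESULT_DECL is simply mapped to it, avoiding a temporary
    copy of the aggregate; for a plain "x = f ();", MODIFY_DEST is "x" and
    may be reused directly when the checks below permit, otherwise a fresh
    variable is created and returned as the use expression.  */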
3034
3035 static tree
3036 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3037 basic_block entry_bb)
3038 {
3039 tree callee = id->src_fn;
3040 tree result = DECL_RESULT (callee);
3041 tree callee_type = TREE_TYPE (result);
3042 tree caller_type;
3043 tree var, use;
3044
3045 /* Handle type-mismatches in the function declaration return type
3046 vs. the call expression. */
3047 if (modify_dest)
3048 caller_type = TREE_TYPE (modify_dest);
3049 else
3050 caller_type = TREE_TYPE (TREE_TYPE (callee));
3051
3052 /* We don't need to do anything for functions that don't return anything. */
3053 if (VOID_TYPE_P (callee_type))
3054 return NULL_TREE;
3055
3056 /* If there was a return slot, then the return value is the
3057 dereferenced address of that object. */
3058 if (return_slot)
3059 {
3060 /* The front end shouldn't have used both return_slot and
3061 a modify expression. */
3062 gcc_assert (!modify_dest);
3063 if (DECL_BY_REFERENCE (result))
3064 {
3065 tree return_slot_addr = build_fold_addr_expr (return_slot);
3066 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3067
3068 /* We are going to construct *&return_slot and we can't do that
3069 for variables believed to not be addressable.
3070
3071 FIXME: This check can possibly match, because values returned
3072 via the return slot optimization are not believed to have their
3073 address taken by alias analysis. */
3074 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3075 var = return_slot_addr;
3076 }
3077 else
3078 {
3079 var = return_slot;
3080 gcc_assert (TREE_CODE (var) != SSA_NAME);
3081 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
3082 }
3083 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3084 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3085 && !DECL_GIMPLE_REG_P (result)
3086 && DECL_P (var))
3087 DECL_GIMPLE_REG_P (var) = 0;
3088 use = NULL;
3089 goto done;
3090 }
3091
3092 /* All types requiring non-trivial constructors should have been handled. */
3093 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3094
3095 /* Attempt to avoid creating a new temporary variable. */
3096 if (modify_dest
3097 && TREE_CODE (modify_dest) != SSA_NAME)
3098 {
3099 bool use_it = false;
3100
3101 /* We can't use MODIFY_DEST if there's type promotion involved. */
3102 if (!useless_type_conversion_p (callee_type, caller_type))
3103 use_it = false;
3104
3105 /* ??? If we're assigning to a variable sized type, then we must
3106 reuse the destination variable, because we've no good way to
3107 create variable sized temporaries at this point. */
3108 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3109 use_it = true;
3110
3111 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3112 reuse it as the result of the call directly. Don't do this if
3113 it would promote MODIFY_DEST to addressable. */
3114 else if (TREE_ADDRESSABLE (result))
3115 use_it = false;
3116 else
3117 {
3118 tree base_m = get_base_address (modify_dest);
3119
3120 /* If the base isn't a decl, then it's a pointer, and we don't
3121 know where that's going to go. */
3122 if (!DECL_P (base_m))
3123 use_it = false;
3124 else if (is_global_var (base_m))
3125 use_it = false;
3126 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3127 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3128 && !DECL_GIMPLE_REG_P (result)
3129 && DECL_GIMPLE_REG_P (base_m))
3130 use_it = false;
3131 else if (!TREE_ADDRESSABLE (base_m))
3132 use_it = true;
3133 }
3134
3135 if (use_it)
3136 {
3137 var = modify_dest;
3138 use = NULL;
3139 goto done;
3140 }
3141 }
3142
3143 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3144
3145 var = copy_result_decl_to_var (result, id);
3146 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3147
3148 /* Do not have the rest of GCC warn about this variable as it should
3149 not be visible to the user. */
3150 TREE_NO_WARNING (var) = 1;
3151
3152 declare_inline_vars (id->block, var);
3153
3154 /* Build the use expr. If the return type of the function was
3155 promoted, convert it back to the expected type. */
3156 use = var;
3157 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3158 {
3159 /* If we can match up types by promotion/demotion do so. */
3160 if (fold_convertible_p (caller_type, var))
3161 use = fold_convert (caller_type, var);
3162 else
3163 {
3164 /* ??? For valid programs we should not end up here.
3165 Still if we end up with truly mismatched types here, fall back
3166 to using a MEM_REF to not leak invalid GIMPLE to the following
3167 passes. */
3168 /* Prevent var from being written into SSA form. */
3169 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3170 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3171 DECL_GIMPLE_REG_P (var) = false;
3172 else if (is_gimple_reg_type (TREE_TYPE (var)))
3173 TREE_ADDRESSABLE (var) = true;
3174 use = fold_build2 (MEM_REF, caller_type,
3175 build_fold_addr_expr (var),
3176 build_int_cst (ptr_type_node, 0));
3177 }
3178 }
3179
3180 STRIP_USELESS_TYPE_CONVERSION (use);
3181
3182 if (DECL_BY_REFERENCE (result))
3183 {
3184 TREE_ADDRESSABLE (var) = 1;
3185 var = build_fold_addr_expr (var);
3186 }
3187
3188 done:
3189 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3190 way, when the RESULT_DECL is encountered, it will be
3191 automatically replaced by the VAR_DECL.
3192
3193 When returning by reference, ensure that RESULT_DECL remaps to
3194 gimple_val. */
3195 if (DECL_BY_REFERENCE (result)
3196 && !is_gimple_val (var))
3197 {
3198 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3199 insert_decl_map (id, result, temp);
3200 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3201 its default_def SSA_NAME. */
3202 if (gimple_in_ssa_p (id->src_cfun)
3203 && is_gimple_reg (result))
3204 {
3205 temp = make_ssa_name (temp, NULL);
3206 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3207 }
3208 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3209 }
3210 else
3211 insert_decl_map (id, result, var);
3212
3213 /* Remember this so we can ignore it in remap_decls. */
3214 id->retvar = var;
3215
3216 return use;
3217 }
3218
3219 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3220 to a local label. */
3221
3222 static tree
3223 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3224 {
3225 tree node = *nodep;
3226 tree fn = (tree) fnp;
3227
3228 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3229 return node;
3230
3231 if (TYPE_P (node))
3232 *walk_subtrees = 0;
3233
3234 return NULL_TREE;
3235 }
3236
3237 /* Determine if the function can be copied. If so, return NULL. If
3238 not, return a string describing the reason for failure. */
3239
3240 static const char *
3241 copy_forbidden (struct function *fun, tree fndecl)
3242 {
3243 const char *reason = fun->cannot_be_copied_reason;
3244 tree decl;
3245 unsigned ix;
3246
3247 /* Only examine the function once. */
3248 if (fun->cannot_be_copied_set)
3249 return reason;
3250
3251 /* We cannot copy a function that receives a non-local goto
3252 because we cannot remap the destination label used in the
3253 function that is performing the non-local goto. */
3254 /* ??? Actually, this should be possible, if we work at it.
3255 No doubt there's just a handful of places that simply
3256 assume it doesn't happen and don't substitute properly. */
3257 if (fun->has_nonlocal_label)
3258 {
3259 reason = G_("function %q+F can never be copied "
3260 "because it receives a non-local goto");
3261 goto fail;
3262 }
3263
3264 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3265 if (TREE_CODE (decl) == VAR_DECL
3266 && TREE_STATIC (decl)
3267 && !DECL_EXTERNAL (decl)
3268 && DECL_INITIAL (decl)
3269 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3270 has_label_address_in_static_1,
3271 fndecl))
3272 {
3273 reason = G_("function %q+F can never be copied because it saves "
3274 "address of local label in a static variable");
3275 goto fail;
3276 }
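 /* E.g. (illustrative): a function containing

      lab:;
      static void *p = &&lab;

    can never be copied, because every copy of the function would still
    refer to the original label through the static initializer.  */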
3277
3278 fail:
3279 fun->cannot_be_copied_reason = reason;
3280 fun->cannot_be_copied_set = true;
3281 return reason;
3282 }
3283
3284
3285 static const char *inline_forbidden_reason;
3286
3287 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3288 iff a function cannot be inlined. Also sets the reason why. */
3289
3290 static tree
3291 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3292 struct walk_stmt_info *wip)
3293 {
3294 tree fn = (tree) wip->info;
3295 tree t;
3296 gimple stmt = gsi_stmt (*gsi);
3297
3298 switch (gimple_code (stmt))
3299 {
3300 case GIMPLE_CALL:
3301 /* Refuse to inline an alloca call unless the user explicitly forced it,
3302 as this may change the program's memory overhead drastically when the
3303 function using alloca is called in a loop. In the GCC present in
3304 SPEC2000, inlining into schedule_block caused it to require 2GB of
3305 RAM instead of 256MB. Don't do so for alloca calls emitted for
3306 VLA objects as those can't cause unbounded growth (they're always
3307 wrapped inside stack_save/stack_restore regions). */
3308 if (gimple_alloca_call_p (stmt)
3309 && !gimple_call_alloca_for_var_p (stmt)
3310 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3311 {
3312 inline_forbidden_reason
3313 = G_("function %q+F can never be inlined because it uses "
3314 "alloca (override using the always_inline attribute)");
3315 *handled_ops_p = true;
3316 return fn;
3317 }
3318
3319 t = gimple_call_fndecl (stmt);
3320 if (t == NULL_TREE)
3321 break;
3322
3323 /* We cannot inline functions that call setjmp. */
3324 if (setjmp_call_p (t))
3325 {
3326 inline_forbidden_reason
3327 = G_("function %q+F can never be inlined because it uses setjmp");
3328 *handled_ops_p = true;
3329 return t;
3330 }
3331
3332 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3333 switch (DECL_FUNCTION_CODE (t))
3334 {
3335 /* We cannot inline functions that take a variable number of
3336 arguments. */
3337 case BUILT_IN_VA_START:
3338 case BUILT_IN_NEXT_ARG:
3339 case BUILT_IN_VA_END:
3340 inline_forbidden_reason
3341 = G_("function %q+F can never be inlined because it "
3342 "uses variable argument lists");
3343 *handled_ops_p = true;
3344 return t;
3345
3346 case BUILT_IN_LONGJMP:
3347 /* We can't inline functions that call __builtin_longjmp at
3348 all. The non-local goto machinery really requires the
3349 destination be in a different function. If we allow the
3350 function calling __builtin_longjmp to be inlined into the
3351 function calling __builtin_setjmp, Things will Go Awry. */
3352 inline_forbidden_reason
3353 = G_("function %q+F can never be inlined because "
3354 "it uses setjmp-longjmp exception handling");
3355 *handled_ops_p = true;
3356 return t;
3357
3358 case BUILT_IN_NONLOCAL_GOTO:
3359 /* Similarly. */
3360 inline_forbidden_reason
3361 = G_("function %q+F can never be inlined because "
3362 "it uses non-local goto");
3363 *handled_ops_p = true;
3364 return t;
3365
3366 case BUILT_IN_RETURN:
3367 case BUILT_IN_APPLY_ARGS:
3368 /* If a __builtin_apply_args caller would be inlined,
3369 it would be saving arguments of the function it has
3370 been inlined into. Similarly __builtin_return would
3371 return from the function the call has been inlined into. */
3372 inline_forbidden_reason
3373 = G_("function %q+F can never be inlined because "
3374 "it uses __builtin_return or __builtin_apply_args");
3375 *handled_ops_p = true;
3376 return t;
3377
3378 default:
3379 break;
3380 }
3381 break;
3382
3383 case GIMPLE_GOTO:
3384 t = gimple_goto_dest (stmt);
3385
3386 /* We will not inline a function which uses computed goto. The
3387 addresses of its local labels, which may be tucked into
3388 global storage, are of course not constant across
3389 instantiations, which causes unexpected behavior. */
3390 if (TREE_CODE (t) != LABEL_DECL)
3391 {
3392 inline_forbidden_reason
3393 = G_("function %q+F can never be inlined "
3394 "because it contains a computed goto");
3395 *handled_ops_p = true;
3396 return t;
3397 }
3398 break;
3399
3400 default:
3401 break;
3402 }
3403
3404 *handled_ops_p = false;
3405 return NULL_TREE;
3406 }
3407
3408 /* Return true if FNDECL is a function that cannot be inlined into
3409 another one. */
3410
3411 static bool
3412 inline_forbidden_p (tree fndecl)
3413 {
3414 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3415 struct walk_stmt_info wi;
3416 struct pointer_set_t *visited_nodes;
3417 basic_block bb;
3418 bool forbidden_p = false;
3419
3420 /* First check for shared reasons not to copy the code. */
3421 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3422 if (inline_forbidden_reason != NULL)
3423 return true;
3424
3425 /* Next, walk the statements of the function looking for
3426 constructs we can't handle, or that are non-optimal for inlining. */
3427 visited_nodes = pointer_set_create ();
3428 memset (&wi, 0, sizeof (wi));
3429 wi.info = (void *) fndecl;
3430 wi.pset = visited_nodes;
3431
3432 FOR_EACH_BB_FN (bb, fun)
3433 {
3434 gimple ret;
3435 gimple_seq seq = bb_seq (bb);
3436 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3437 forbidden_p = (ret != NULL);
3438 if (forbidden_p)
3439 break;
3440 }
3441
3442 pointer_set_destroy (visited_nodes);
3443 return forbidden_p;
3444 }
3445 \f
3446 /* Return false if the function FNDECL cannot be inlined on account of its
3447 attributes, true otherwise. */
3448 static bool
3449 function_attribute_inlinable_p (const_tree fndecl)
3450 {
3451 if (targetm.attribute_table)
3452 {
3453 const_tree a;
3454
3455 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3456 {
3457 const_tree name = TREE_PURPOSE (a);
3458 int i;
3459
3460 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3461 if (is_attribute_p (targetm.attribute_table[i].name, name))
3462 return targetm.function_attribute_inlinable_p (fndecl);
3463 }
3464 }
3465
3466 return true;
3467 }
3468
3469 /* Returns nonzero if FN is a function that does not have any
3470 fundamental inline blocking properties. */
3471
3472 bool
3473 tree_inlinable_function_p (tree fn)
3474 {
3475 bool inlinable = true;
3476 bool do_warning;
3477 tree always_inline;
3478
3479 /* If we've already decided this function shouldn't be inlined,
3480 there's no need to check again. */
3481 if (DECL_UNINLINABLE (fn))
3482 return false;
3483
3484 /* We only warn for functions declared `inline' by the user. */
3485 do_warning = (warn_inline
3486 && DECL_DECLARED_INLINE_P (fn)
3487 && !DECL_NO_INLINE_WARNING_P (fn)
3488 && !DECL_IN_SYSTEM_HEADER (fn));
3489
3490 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3491
3492 if (flag_no_inline
3493 && always_inline == NULL)
3494 {
3495 if (do_warning)
3496 warning (OPT_Winline, "function %q+F can never be inlined because it "
3497 "is suppressed using -fno-inline", fn);
3498 inlinable = false;
3499 }
3500
3501 else if (!function_attribute_inlinable_p (fn))
3502 {
3503 if (do_warning)
3504 warning (OPT_Winline, "function %q+F can never be inlined because it "
3505 "uses attributes conflicting with inlining", fn);
3506 inlinable = false;
3507 }
3508
3509 else if (inline_forbidden_p (fn))
3510 {
3511 /* See if we should warn about uninlinable functions. Previously,
3512 some of these warnings would be issued while trying to expand
3513 the function inline, but that would cause multiple warnings
3514 about functions that would for example call alloca. But since
3515 this is a property of the function, just one warning is enough.
3516 As a bonus we can now give more details about the reason why a
3517 function is not inlinable. */
3518 if (always_inline)
3519 error (inline_forbidden_reason, fn);
3520 else if (do_warning)
3521 warning (OPT_Winline, inline_forbidden_reason, fn);
3522
3523 inlinable = false;
3524 }
3525
3526 /* Squirrel away the result so that we don't have to check again. */
3527 DECL_UNINLINABLE (fn) = !inlinable;
3528
3529 return inlinable;
3530 }
3531
3532 /* Estimate the cost of a memory move. Use the machine-dependent
3533 word size and take a possible memcpy call into account. */
3534
3535 int
3536 estimate_move_cost (tree type)
3537 {
3538 HOST_WIDE_INT size;
3539
3540 gcc_assert (!VOID_TYPE_P (type));
3541
3542 if (TREE_CODE (type) == VECTOR_TYPE)
3543 {
3544 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3545 enum machine_mode simd
3546 = targetm.vectorize.preferred_simd_mode (inner);
3547 int simd_mode_size = GET_MODE_SIZE (simd);
3548 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3549 / simd_mode_size);
3550 }
3551
3552 size = int_size_in_bytes (type);
3553
3554 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3555 /* Cost of a memcpy call, 3 arguments and the call. */
3556 return 4;
3557 else
3558 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3559 }
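/* For illustration (assuming a target where MOVE_MAX_PIECES is 8 and
   MOVE_RATIO (!optimize_size) is 4): moving a 16-byte aggregate costs
   (16 + 8 - 1) / 8 = 2, while a 64-byte aggregate exceeds the
   8 * 4 = 32 byte threshold and is charged the flat memcpy cost of 4.  */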
3560
3561 /* Returns cost of operation CODE, according to WEIGHTS */
3562
3563 static int
3564 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3565 tree op1 ATTRIBUTE_UNUSED, tree op2)
3566 {
3567 switch (code)
3568 {
3569 /* These are "free" conversions, or their presumed cost
3570 is folded into other operations. */
3571 case RANGE_EXPR:
3572 CASE_CONVERT:
3573 case COMPLEX_EXPR:
3574 case PAREN_EXPR:
3575 case VIEW_CONVERT_EXPR:
3576 return 0;
3577
3578 /* Assign cost of 1 to usual operations.
3579 ??? We may consider mapping RTL costs to this. */
3580 case COND_EXPR:
3581 case VEC_COND_EXPR:
3582 case VEC_PERM_EXPR:
3583
3584 case PLUS_EXPR:
3585 case POINTER_PLUS_EXPR:
3586 case MINUS_EXPR:
3587 case MULT_EXPR:
3588 case MULT_HIGHPART_EXPR:
3589 case FMA_EXPR:
3590
3591 case ADDR_SPACE_CONVERT_EXPR:
3592 case FIXED_CONVERT_EXPR:
3593 case FIX_TRUNC_EXPR:
3594
3595 case NEGATE_EXPR:
3596 case FLOAT_EXPR:
3597 case MIN_EXPR:
3598 case MAX_EXPR:
3599 case ABS_EXPR:
3600
3601 case LSHIFT_EXPR:
3602 case RSHIFT_EXPR:
3603 case LROTATE_EXPR:
3604 case RROTATE_EXPR:
3605 case VEC_LSHIFT_EXPR:
3606 case VEC_RSHIFT_EXPR:
3607
3608 case BIT_IOR_EXPR:
3609 case BIT_XOR_EXPR:
3610 case BIT_AND_EXPR:
3611 case BIT_NOT_EXPR:
3612
3613 case TRUTH_ANDIF_EXPR:
3614 case TRUTH_ORIF_EXPR:
3615 case TRUTH_AND_EXPR:
3616 case TRUTH_OR_EXPR:
3617 case TRUTH_XOR_EXPR:
3618 case TRUTH_NOT_EXPR:
3619
3620 case LT_EXPR:
3621 case LE_EXPR:
3622 case GT_EXPR:
3623 case GE_EXPR:
3624 case EQ_EXPR:
3625 case NE_EXPR:
3626 case ORDERED_EXPR:
3627 case UNORDERED_EXPR:
3628
3629 case UNLT_EXPR:
3630 case UNLE_EXPR:
3631 case UNGT_EXPR:
3632 case UNGE_EXPR:
3633 case UNEQ_EXPR:
3634 case LTGT_EXPR:
3635
3636 case CONJ_EXPR:
3637
3638 case PREDECREMENT_EXPR:
3639 case PREINCREMENT_EXPR:
3640 case POSTDECREMENT_EXPR:
3641 case POSTINCREMENT_EXPR:
3642
3643 case REALIGN_LOAD_EXPR:
3644
3645 case REDUC_MAX_EXPR:
3646 case REDUC_MIN_EXPR:
3647 case REDUC_PLUS_EXPR:
3648 case WIDEN_SUM_EXPR:
3649 case WIDEN_MULT_EXPR:
3650 case DOT_PROD_EXPR:
3651 case WIDEN_MULT_PLUS_EXPR:
3652 case WIDEN_MULT_MINUS_EXPR:
3653 case WIDEN_LSHIFT_EXPR:
3654
3655 case VEC_WIDEN_MULT_HI_EXPR:
3656 case VEC_WIDEN_MULT_LO_EXPR:
3657 case VEC_WIDEN_MULT_EVEN_EXPR:
3658 case VEC_WIDEN_MULT_ODD_EXPR:
3659 case VEC_UNPACK_HI_EXPR:
3660 case VEC_UNPACK_LO_EXPR:
3661 case VEC_UNPACK_FLOAT_HI_EXPR:
3662 case VEC_UNPACK_FLOAT_LO_EXPR:
3663 case VEC_PACK_TRUNC_EXPR:
3664 case VEC_PACK_SAT_EXPR:
3665 case VEC_PACK_FIX_TRUNC_EXPR:
3666 case VEC_WIDEN_LSHIFT_HI_EXPR:
3667 case VEC_WIDEN_LSHIFT_LO_EXPR:
3668
3669 return 1;
3670
3671 /* A few special cases of expensive operations. This is useful
3672 to avoid inlining functions that contain too many of these. */
3673 case TRUNC_DIV_EXPR:
3674 case CEIL_DIV_EXPR:
3675 case FLOOR_DIV_EXPR:
3676 case ROUND_DIV_EXPR:
3677 case EXACT_DIV_EXPR:
3678 case TRUNC_MOD_EXPR:
3679 case CEIL_MOD_EXPR:
3680 case FLOOR_MOD_EXPR:
3681 case ROUND_MOD_EXPR:
3682 case RDIV_EXPR:
3683 if (TREE_CODE (op2) != INTEGER_CST)
3684 return weights->div_mod_cost;
3685 return 1;
3686
3687 default:
3688 /* We expect a copy assignment with no operator. */
3689 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3690 return 0;
3691 }
3692 }
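/* For example, under the table above a division "x / y" where Y is not
   an INTEGER_CST is charged WEIGHTS->div_mod_cost, whereas "x / 16"
   counts as 1 like any other simple operation.  */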
3693
3694
3695 /* Estimate number of instructions that will be created by expanding
3696 the statements in the statement sequence STMTS.
3697 WEIGHTS contains weights attributed to various constructs. */
3698
3699 static int
3700 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3701 {
3702 int cost;
3703 gimple_stmt_iterator gsi;
3704
3705 cost = 0;
3706 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3707 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3708
3709 return cost;
3710 }
3711
3712
3713 /* Estimate number of instructions that will be created by expanding STMT.
3714 WEIGHTS contains weights attributed to various constructs. */
3715
3716 int
3717 estimate_num_insns (gimple stmt, eni_weights *weights)
3718 {
3719 unsigned cost, i;
3720 enum gimple_code code = gimple_code (stmt);
3721 tree lhs;
3722 tree rhs;
3723
3724 switch (code)
3725 {
3726 case GIMPLE_ASSIGN:
3727 /* Try to estimate the cost of assignments. We have two cases to
3728 deal with:
3729 1) Simple assignments to registers;
3730 2) Stores to things that must live in memory. This includes
3731 "normal" stores to scalars, but also assignments of large
3732 structures, or constructors of big arrays;
3733
3734 Let us look at both cases, assuming we have "a = b + C":
3735 <GIMPLE_ASSIGN <var_decl "a">
3736 <plus_expr <var_decl "b"> <constant C>>
3737 If "a" is a GIMPLE register, the assignment to it is free on almost
3738 any target, because "a" usually ends up in a real register. Hence
3739 the only cost of this expression comes from the PLUS_EXPR, and we
3740 can ignore the GIMPLE_ASSIGN.
3741 If "a" is not a GIMPLE register, the assignment to "a" will most
3742 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3743 of moving something into "a", which we compute using the function
3744 estimate_move_cost. */
3745 if (gimple_clobber_p (stmt))
3746 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3747
3748 lhs = gimple_assign_lhs (stmt);
3749 rhs = gimple_assign_rhs1 (stmt);
3750
3751 cost = 0;
3752
3753 /* Account for the cost of moving to / from memory. */
3754 if (gimple_store_p (stmt))
3755 cost += estimate_move_cost (TREE_TYPE (lhs));
3756 if (gimple_assign_load_p (stmt))
3757 cost += estimate_move_cost (TREE_TYPE (rhs));
3758
3759 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3760 gimple_assign_rhs1 (stmt),
3761 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3762 == GIMPLE_BINARY_RHS
3763 ? gimple_assign_rhs2 (stmt) : NULL);
3764 break;
3765
3766 case GIMPLE_COND:
3767 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3768 gimple_op (stmt, 0),
3769 gimple_op (stmt, 1));
3770 break;
3771
3772 case GIMPLE_SWITCH:
3773 /* Take into account cost of the switch + guess 2 conditional jumps for
3774 each case label.
3775
3776 TODO: once the switch expansion logic is sufficiently separated, we can
3777 do a better job of estimating the cost of the switch. */
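/* For instance, a switch with 16 labels is estimated at
   floor_log2 (16) * 2 = 8 when WEIGHTS->time_based, and at
   16 * 2 = 32 otherwise.  */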
3778 if (weights->time_based)
3779 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3780 else
3781 cost = gimple_switch_num_labels (stmt) * 2;
3782 break;
3783
3784 case GIMPLE_CALL:
3785 {
3786 tree decl = gimple_call_fndecl (stmt);
3787 struct cgraph_node *node = NULL;
3788
3789 /* Do not special case builtins where we see the body.
3790 This just confuses the inliner. */
3791 if (!decl || !(node = cgraph_get_node (decl)) || node->definition)
3792 ;
3793 /* For builtins that are likely expanded to nothing or
3794 inlined, do not account for operand costs. */
3795 else if (is_simple_builtin (decl))
3796 return 0;
3797 else if (is_inexpensive_builtin (decl))
3798 return weights->target_builtin_call_cost;
3799 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3800 {
3801 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3802 specialize the cheap expansion we do here.
3803 ??? This asks for a more general solution. */
3804 switch (DECL_FUNCTION_CODE (decl))
3805 {
3806 case BUILT_IN_POW:
3807 case BUILT_IN_POWF:
3808 case BUILT_IN_POWL:
3809 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3810 && REAL_VALUES_EQUAL
3811 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3812 return estimate_operator_cost (MULT_EXPR, weights,
3813 gimple_call_arg (stmt, 0),
3814 gimple_call_arg (stmt, 0));
3815 break;
3816
3817 default:
3818 break;
3819 }
3820 }
3821
3822 cost = node ? weights->call_cost : weights->indirect_call_cost;
3823 if (gimple_call_lhs (stmt))
3824 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3825 for (i = 0; i < gimple_call_num_args (stmt); i++)
3826 {
3827 tree arg = gimple_call_arg (stmt, i);
3828 cost += estimate_move_cost (TREE_TYPE (arg));
3829 }
3830 break;
3831 }
3832
3833 case GIMPLE_RETURN:
3834 return weights->return_cost;
3835
3836 case GIMPLE_GOTO:
3837 case GIMPLE_LABEL:
3838 case GIMPLE_NOP:
3839 case GIMPLE_PHI:
3840 case GIMPLE_PREDICT:
3841 case GIMPLE_DEBUG:
3842 return 0;
3843
3844 case GIMPLE_ASM:
3845 {
3846 int count = asm_str_count (gimple_asm_string (stmt));
3847 /* 1000 means infinity. This avoids overflows later
3848 with very long asm statements. */
3849 if (count > 1000)
3850 count = 1000;
3851 return count;
3852 }
3853
3854 case GIMPLE_RESX:
3855 /* This is either going to be an external function call with one
3856 argument, or two register copy statements plus a goto. */
3857 return 2;
3858
3859 case GIMPLE_EH_DISPATCH:
3860 /* ??? This is going to turn into a switch statement. Ideally
3861 we'd have a look at the eh region and estimate the number of
3862 edges involved. */
3863 return 10;
3864
3865 case GIMPLE_BIND:
3866 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3867
3868 case GIMPLE_EH_FILTER:
3869 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3870
3871 case GIMPLE_CATCH:
3872 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3873
3874 case GIMPLE_TRY:
3875 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3876 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3877
3878 /* OpenMP directives are generally very expensive. */
3879
3880 case GIMPLE_OMP_RETURN:
3881 case GIMPLE_OMP_SECTIONS_SWITCH:
3882 case GIMPLE_OMP_ATOMIC_STORE:
3883 case GIMPLE_OMP_CONTINUE:
3884 /* ...except these, which are cheap. */
3885 return 0;
3886
3887 case GIMPLE_OMP_ATOMIC_LOAD:
3888 return weights->omp_cost;
3889
3890 case GIMPLE_OMP_FOR:
3891 return (weights->omp_cost
3892 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3893 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3894
3895 case GIMPLE_OMP_PARALLEL:
3896 case GIMPLE_OMP_TASK:
3897 case GIMPLE_OMP_CRITICAL:
3898 case GIMPLE_OMP_MASTER:
3899 case GIMPLE_OMP_TASKGROUP:
3900 case GIMPLE_OMP_ORDERED:
3901 case GIMPLE_OMP_SECTION:
3902 case GIMPLE_OMP_SECTIONS:
3903 case GIMPLE_OMP_SINGLE:
3904 case GIMPLE_OMP_TARGET:
3905 case GIMPLE_OMP_TEAMS:
3906 return (weights->omp_cost
3907 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3908
3909 case GIMPLE_TRANSACTION:
3910 return (weights->tm_cost
3911 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3912 weights));
3913
3914 default:
3915 gcc_unreachable ();
3916 }
3917
3918 return cost;
3919 }
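/* Two worked examples of the GIMPLE_ASSIGN case above: for a register
   assignment "c_3 = a_1 / b_2" nothing is stored, so the whole cost is
   estimate_operator_cost, i.e. WEIGHTS->div_mod_cost because B_2 is
   not an INTEGER_CST; for a plain store "g = c_3" of a word-sized
   scalar the operator is free and the cost is just
   estimate_move_cost (TREE_TYPE (g)).  */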
3920
3921 /* Estimate number of instructions that will be created by expanding
3922 function FNDECL. WEIGHTS contains weights attributed to various
3923 constructs. */
3924
3925 int
3926 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3927 {
3928 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3929 gimple_stmt_iterator bsi;
3930 basic_block bb;
3931 int n = 0;
3932
3933 gcc_assert (my_function && my_function->cfg);
3934 FOR_EACH_BB_FN (bb, my_function)
3935 {
3936 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3937 n += estimate_num_insns (gsi_stmt (bsi), weights);
3938 }
3939
3940 return n;
3941 }
3942
3943
3944 /* Initializes weights used by estimate_num_insns. */
3945
3946 void
3947 init_inline_once (void)
3948 {
3949 eni_size_weights.call_cost = 1;
3950 eni_size_weights.indirect_call_cost = 3;
3951 eni_size_weights.target_builtin_call_cost = 1;
3952 eni_size_weights.div_mod_cost = 1;
3953 eni_size_weights.omp_cost = 40;
3954 eni_size_weights.tm_cost = 10;
3955 eni_size_weights.time_based = false;
3956 eni_size_weights.return_cost = 1;
3957
3958 /* Estimating time for call is difficult, since we have no idea what the
3959 called function does. In the current uses of eni_time_weights,
3960 underestimating the cost does less harm than overestimating it, so
3961 we choose a rather small value here. */
3962 eni_time_weights.call_cost = 10;
3963 eni_time_weights.indirect_call_cost = 15;
3964 eni_time_weights.target_builtin_call_cost = 1;
3965 eni_time_weights.div_mod_cost = 10;
3966 eni_time_weights.omp_cost = 40;
3967 eni_time_weights.tm_cost = 40;
3968 eni_time_weights.time_based = true;
3969 eni_time_weights.return_cost = 2;
3970 }
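/* Under eni_time_weights, for example, a direct call "r = f (a, b)"
   with word-sized scalar arguments and result is estimated at
   call_cost plus three operand moves, i.e. 10 + 3 = 13, while the
   same call through a function pointer starts from
   indirect_call_cost = 15 instead (this assumes estimate_move_cost
   returns 1 for each operand, as it does for small scalars).  */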
3971
3972 /* Estimate the number of instructions in a gimple_seq. */
3973
3974 int
3975 count_insns_seq (gimple_seq seq, eni_weights *weights)
3976 {
3977 gimple_stmt_iterator gsi;
3978 int n = 0;
3979 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3980 n += estimate_num_insns (gsi_stmt (gsi), weights);
3981
3982 return n;
3983 }
3984
3985
3986 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3987
3988 static void
3989 prepend_lexical_block (tree current_block, tree new_block)
3990 {
3991 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3992 BLOCK_SUBBLOCKS (current_block) = new_block;
3993 BLOCK_SUPERCONTEXT (new_block) = current_block;
3994 }
3995
3996 /* Add local variables from CALLEE to CALLER. */
3997
3998 static inline void
3999 add_local_variables (struct function *callee, struct function *caller,
4000 copy_body_data *id)
4001 {
4002 tree var;
4003 unsigned ix;
4004
4005 FOR_EACH_LOCAL_DECL (callee, ix, var)
4006 if (!can_be_nonlocal (var, id))
4007 {
4008 tree new_var = remap_decl (var, id);
4009
4010 /* Remap debug-expressions. */
4011 if (TREE_CODE (new_var) == VAR_DECL
4012 && DECL_HAS_DEBUG_EXPR_P (var)
4013 && new_var != var)
4014 {
4015 tree tem = DECL_DEBUG_EXPR (var);
4016 bool old_regimplify = id->regimplify;
4017 id->remapping_type_depth++;
4018 walk_tree (&tem, copy_tree_body_r, id, NULL);
4019 id->remapping_type_depth--;
4020 id->regimplify = old_regimplify;
4021 SET_DECL_DEBUG_EXPR (new_var, tem);
4022 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4023 }
4024 add_local_decl (caller, new_var);
4025 }
4026 }
4027
4028 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4029
4030 static bool
4031 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4032 {
4033 tree use_retvar;
4034 tree fn;
4035 struct pointer_map_t *st, *dst;
4036 tree return_slot;
4037 tree modify_dest;
4038 location_t saved_location;
4039 struct cgraph_edge *cg_edge;
4040 cgraph_inline_failed_t reason;
4041 basic_block return_block;
4042 edge e;
4043 gimple_stmt_iterator gsi, stmt_gsi;
4044 bool successfully_inlined = FALSE;
4045 bool purge_dead_abnormal_edges;
4046
4047 /* Set input_location here so we get the right instantiation context
4048 if we call instantiate_decl from inlinable_function_p. */
4049 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4050 saved_location = input_location;
4051 input_location = gimple_location (stmt);
4052
4053 /* From here on, we're only interested in CALL_EXPRs. */
4054 if (gimple_code (stmt) != GIMPLE_CALL)
4055 goto egress;
4056
4057 cg_edge = cgraph_edge (id->dst_node, stmt);
4058 gcc_checking_assert (cg_edge);
4059 /* First, see if we can figure out what function is being called.
4060 If we cannot, then there is no hope of inlining the function. */
4061 if (cg_edge->indirect_unknown_callee)
4062 goto egress;
4063 fn = cg_edge->callee->decl;
4064 gcc_checking_assert (fn);
4065
4066 /* If FN is a declaration of a function in a nested scope that was
4067 globally declared inline, we don't set its DECL_INITIAL.
4068 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4069 C++ front-end uses it for cdtors to refer to their internal
4070 declarations, that are not real functions. Fortunately those
4071 don't have trees to be saved, so we can tell by checking their
4072 gimple_body. */
4073 if (!DECL_INITIAL (fn)
4074 && DECL_ABSTRACT_ORIGIN (fn)
4075 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4076 fn = DECL_ABSTRACT_ORIGIN (fn);
4077
4078 /* Don't try to inline functions that are not well-suited to inlining. */
4079 if (cg_edge->inline_failed)
4080 {
4081 reason = cg_edge->inline_failed;
4082 /* If this call was originally indirect, we do not want to emit any
4083 inlining related warnings or sorry messages because there are no
4084 guarantees regarding those. */
4085 if (cg_edge->indirect_inlining_edge)
4086 goto egress;
4087
4088 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4089 /* For extern inline functions that get redefined we always
4090 silently ignore the always_inline flag. Better behaviour would
4091 be to be able to keep both bodies and use the extern inline body
4092 for inlining, but we can't do that because frontends overwrite
4093 the body. */
4094 && !cg_edge->callee->local.redefined_extern_inline
4095 /* During early inline pass, report only when optimization is
4096 not turned on. */
4097 && (cgraph_global_info_ready
4098 || !optimize)
4099 /* PR 20090218-1_0.c. Body can be provided by another module. */
4100 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4101 {
4102 error ("inlining failed in call to always_inline %q+F: %s", fn,
4103 cgraph_inline_failed_string (reason));
4104 error ("called from here");
4105 }
4106 else if (warn_inline
4107 && DECL_DECLARED_INLINE_P (fn)
4108 && !DECL_NO_INLINE_WARNING_P (fn)
4109 && !DECL_IN_SYSTEM_HEADER (fn)
4110 && reason != CIF_UNSPECIFIED
4111 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4112 /* Do not warn about not inlined recursive calls. */
4113 && !cgraph_edge_recursive_p (cg_edge)
4114 /* Avoid warnings during early inline pass. */
4115 && cgraph_global_info_ready)
4116 {
4117 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4118 fn, _(cgraph_inline_failed_string (reason)));
4119 warning (OPT_Winline, "called from here");
4120 }
4121 goto egress;
4122 }
4123 fn = cg_edge->callee->decl;
4124 cgraph_get_body (cg_edge->callee);
4125
4126 #ifdef ENABLE_CHECKING
4127 if (cg_edge->callee->decl != id->dst_node->decl)
4128 verify_cgraph_node (cg_edge->callee);
4129 #endif
4130
4131 /* We will be inlining this callee. */
4132 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4133
4134 /* Update the caller's EH personality. */
4135 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4136 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4137 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4138
4139 /* Split the block holding the GIMPLE_CALL. */
4140 e = split_block (bb, stmt);
4141 bb = e->src;
4142 return_block = e->dest;
4143 remove_edge (e);
4144
4145 /* split_block splits after the statement; work around this by
4146 moving the call into the second block manually. Not pretty,
4147 but seems easier than doing the CFG manipulation by hand
4148 when the GIMPLE_CALL is in the last statement of BB. */
4149 stmt_gsi = gsi_last_bb (bb);
4150 gsi_remove (&stmt_gsi, false);
4151
4152 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4153 been the source of abnormal edges. In this case, schedule
4154 the removal of dead abnormal edges. */
4155 gsi = gsi_start_bb (return_block);
4156 if (gsi_end_p (gsi))
4157 {
4158 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4159 purge_dead_abnormal_edges = true;
4160 }
4161 else
4162 {
4163 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4164 purge_dead_abnormal_edges = false;
4165 }
4166
4167 stmt_gsi = gsi_start_bb (return_block);
4168
4169 /* Build a block containing code to initialize the arguments, the
4170 actual inline expansion of the body, and a label for the return
4171 statements within the function to jump to. The type of the
4172 statement expression is the return type of the function call.
4173 ??? If the call does not have an associated block then we will
4174 remap all callee blocks to NULL, effectively dropping most of
4175 its debug information. This should only happen for calls to
4176 artificial decls inserted by the compiler itself. We need to
4177 either link the inlined blocks into the caller block tree or
4178 not refer to them in any way to not break GC for locations. */
4179 if (gimple_block (stmt))
4180 {
4181 id->block = make_node (BLOCK);
4182 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4183 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4184 prepend_lexical_block (gimple_block (stmt), id->block);
4185 }
4186
4187 /* Local declarations will be replaced by their equivalents in this
4188 map. */
4189 st = id->decl_map;
4190 id->decl_map = pointer_map_create ();
4191 dst = id->debug_map;
4192 id->debug_map = NULL;
4193
4194 /* Record the function we are about to inline. */
4195 id->src_fn = fn;
4196 id->src_node = cg_edge->callee;
4197 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4198 id->gimple_call = stmt;
4199
4200 gcc_assert (!id->src_cfun->after_inlining);
4201
4202 id->entry_bb = bb;
4203 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4204 {
4205 gimple_stmt_iterator si = gsi_last_bb (bb);
4206 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4207 NOT_TAKEN),
4208 GSI_NEW_STMT);
4209 }
4210 initialize_inlined_parameters (id, stmt, fn, bb);
4211
4212 if (DECL_INITIAL (fn))
4213 {
4214 if (gimple_block (stmt))
4215 {
4216 tree *var;
4217
4218 prepend_lexical_block (id->block,
4219 remap_blocks (DECL_INITIAL (fn), id));
4220 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4221 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4222 == NULL_TREE));
4223 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4224 otherwise, for DWARF, the DW_TAG_formal_parameter entries will not be
4225 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4226 under it. The parameters can then be evaluated in the debugger,
4227 but they don't show up in backtraces. */
4228 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4229 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4230 {
4231 tree v = *var;
4232 *var = TREE_CHAIN (v);
4233 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4234 BLOCK_VARS (id->block) = v;
4235 }
4236 else
4237 var = &TREE_CHAIN (*var);
4238 }
4239 else
4240 remap_blocks_to_null (DECL_INITIAL (fn), id);
4241 }
4242
4243 /* Return statements in the function body will be replaced by jumps
4244 to the RET_LABEL. */
4245 gcc_assert (DECL_INITIAL (fn));
4246 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4247
4248 /* Find the LHS to which the result of this call is assigned. */
4249 return_slot = NULL;
4250 if (gimple_call_lhs (stmt))
4251 {
4252 modify_dest = gimple_call_lhs (stmt);
4253
4254 /* The function which we are inlining might not return a value,
4255 in which case we should issue a warning that the function
4256 does not return a value. In that case the optimizers will
4257 see that the variable to which the value is assigned was not
4258 initialized. We do not want to issue a warning about that
4259 uninitialized variable. */
4260 if (DECL_P (modify_dest))
4261 TREE_NO_WARNING (modify_dest) = 1;
4262
4263 if (gimple_call_return_slot_opt_p (stmt))
4264 {
4265 return_slot = modify_dest;
4266 modify_dest = NULL;
4267 }
4268 }
4269 else
4270 modify_dest = NULL;
4271
4272 /* If we are inlining a call to the C++ operator new, we don't want
4273 to use type based alias analysis on the return value. Otherwise
4274 we may get confused if the compiler sees that the inlined new
4275 function returns a pointer which was just deleted. See bug
4276 33407. */
4277 if (DECL_IS_OPERATOR_NEW (fn))
4278 {
4279 return_slot = NULL;
4280 modify_dest = NULL;
4281 }
4282
4283 /* Declare the return variable for the function. */
4284 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4285
4286 /* Add local vars in this inlined callee to caller. */
4287 add_local_variables (id->src_cfun, cfun, id);
4288
4289 if (dump_file && (dump_flags & TDF_DETAILS))
4290 {
4291 fprintf (dump_file, "Inlining ");
4292 print_generic_expr (dump_file, id->src_fn, 0);
4293 fprintf (dump_file, " to ");
4294 print_generic_expr (dump_file, id->dst_fn, 0);
4295 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4296 }
4297
4298 /* This is it. Duplicate the callee body. Assume callee is
4299 pre-gimplified. Note that we must not alter the caller
4300 function in any way before this point, as this CALL_EXPR may be
4301 a self-referential call; if we're calling ourselves, we need to
4302 duplicate our body before altering anything. */
4303 copy_body (id, bb->count,
4304 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4305 bb, return_block, NULL);
4306
4307 /* Reset the escaped solution. */
4308 if (cfun->gimple_df)
4309 pt_solution_reset (&cfun->gimple_df->escaped);
4310
4311 /* Clean up. */
4312 if (id->debug_map)
4313 {
4314 pointer_map_destroy (id->debug_map);
4315 id->debug_map = dst;
4316 }
4317 pointer_map_destroy (id->decl_map);
4318 id->decl_map = st;
4319
4320 /* Unlink the call's virtual operands before replacing it. */
4321 unlink_stmt_vdef (stmt);
4322
4323 /* If the inlined function returns a result that we care about,
4324 substitute the GIMPLE_CALL with an assignment of the return
4325 variable to the LHS of the call. That is, if STMT was
4326 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4327 if (use_retvar && gimple_call_lhs (stmt))
4328 {
4329 gimple old_stmt = stmt;
4330 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4331 gsi_replace (&stmt_gsi, stmt, false);
4332 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4333 }
4334 else
4335 {
4336 /* Handle the case of inlining a function with no return
4337 statement, which causes the return value to become undefined. */
4338 if (gimple_call_lhs (stmt)
4339 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4340 {
4341 tree name = gimple_call_lhs (stmt);
4342 tree var = SSA_NAME_VAR (name);
4343 tree def = ssa_default_def (cfun, var);
4344
4345 if (def)
4346 {
4347 /* If the variable is used undefined, make this name
4348 undefined via a move. */
4349 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4350 gsi_replace (&stmt_gsi, stmt, true);
4351 }
4352 else
4353 {
4354 /* Otherwise make this variable undefined. */
4355 gsi_remove (&stmt_gsi, true);
4356 set_ssa_default_def (cfun, var, name);
4357 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4358 }
4359 }
4360 else
4361 gsi_remove (&stmt_gsi, true);
4362 }
4363
4364 if (purge_dead_abnormal_edges)
4365 {
4366 gimple_purge_dead_eh_edges (return_block);
4367 gimple_purge_dead_abnormal_call_edges (return_block);
4368 }
4369
4370 /* If the value of the new expression is ignored, that's OK. We
4371 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4372 the equivalent inlined version either. */
4373 if (is_gimple_assign (stmt))
4374 {
4375 gcc_assert (gimple_assign_single_p (stmt)
4376 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4377 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4378 }
4379
4380 /* Output the inlining info for this abstract function, since it has been
4381 inlined. If we don't do this now, we can lose the information about the
4382 variables in the function when the blocks get blown away as soon as we
4383 remove the cgraph node. */
4384 if (gimple_block (stmt))
4385 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4386
4387 /* Update callgraph if needed. */
4388 cgraph_remove_node (cg_edge->callee);
4389
4390 id->block = NULL_TREE;
4391 successfully_inlined = TRUE;
4392
4393 egress:
4394 input_location = saved_location;
4395 return successfully_inlined;
4396 }
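/* A rough sketch of the transformation performed above: a statement

     a = foo (x);

   in the caller is replaced, once the callee body has been copied in
   between the split blocks, by

     <copy of foo's body, with its parameter initialized from x and
      every return turned into an assignment to a return variable
      followed by a jump to RETURN_BLOCK>
     a = retval.42;

   where retval.42 stands for the USE_RETVAR temporary created by
   declare_return_variable; the name is purely illustrative.  */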
4397
4398 /* Expand call statements reachable from STMT_P.
4399 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4400 in a MODIFY_EXPR. */
4401
4402 static bool
4403 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4404 {
4405 gimple_stmt_iterator gsi;
4406
4407 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4408 {
4409 gimple stmt = gsi_stmt (gsi);
4410
4411 if (is_gimple_call (stmt)
4412 && expand_call_inline (bb, stmt, id))
4413 return true;
4414 }
4415
4416 return false;
4417 }
4418
4419
4420 /* Walk all basic blocks created after FIRST and try to fold every statement
4421 in the STATEMENTS pointer set. */
4422
4423 static void
4424 fold_marked_statements (int first, struct pointer_set_t *statements)
4425 {
4426 for (; first < n_basic_blocks; first++)
4427 if (BASIC_BLOCK (first))
4428 {
4429 gimple_stmt_iterator gsi;
4430
4431 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4432 !gsi_end_p (gsi);
4433 gsi_next (&gsi))
4434 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4435 {
4436 gimple old_stmt = gsi_stmt (gsi);
4437 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4438
4439 if (old_decl && DECL_BUILT_IN (old_decl))
4440 {
4441 /* Folding builtins can create multiple instructions,
4442 so we need to look at all of them. */
4443 gimple_stmt_iterator i2 = gsi;
4444 gsi_prev (&i2);
4445 if (fold_stmt (&gsi))
4446 {
4447 gimple new_stmt;
4448 /* If a builtin at the end of a bb folded into nothing,
4449 the following loop won't work. */
4450 if (gsi_end_p (gsi))
4451 {
4452 cgraph_update_edges_for_call_stmt (old_stmt,
4453 old_decl, NULL);
4454 break;
4455 }
4456 if (gsi_end_p (i2))
4457 i2 = gsi_start_bb (BASIC_BLOCK (first));
4458 else
4459 gsi_next (&i2);
4460 while (1)
4461 {
4462 new_stmt = gsi_stmt (i2);
4463 update_stmt (new_stmt);
4464 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4465 new_stmt);
4466
4467 if (new_stmt == gsi_stmt (gsi))
4468 {
4469 /* It is okay to check only for the very last
4470 of these statements. If it is a throwing
4471 statement nothing will change. If it isn't,
4472 this can remove EH edges. The only case in
4473 which this would be incorrect is if some
4474 intermediate stmts throw but the last one
4475 does not; that would mean we'd have to split
4476 the block, which we can't do here, and we'd
4477 lose the edges anyway. And as builtins
4478 probably never throw, this all is moot anyway. */
4479 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4480 new_stmt))
4481 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4482 break;
4483 }
4484 gsi_next (&i2);
4485 }
4486 }
4487 }
4488 else if (fold_stmt (&gsi))
4489 {
4490 /* Re-read the statement from GSI as fold_stmt() may
4491 have changed it. */
4492 gimple new_stmt = gsi_stmt (gsi);
4493 update_stmt (new_stmt);
4494
4495 if (is_gimple_call (old_stmt)
4496 || is_gimple_call (new_stmt))
4497 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4498 new_stmt);
4499
4500 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4501 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4502 }
4503 }
4504 }
4505 }
4506
4507 /* Return true if BB has at least one abnormal outgoing edge. */
4508
4509 static inline bool
4510 has_abnormal_outgoing_edge_p (basic_block bb)
4511 {
4512 edge e;
4513 edge_iterator ei;
4514
4515 FOR_EACH_EDGE (e, ei, bb->succs)
4516 if (e->flags & EDGE_ABNORMAL)
4517 return true;
4518
4519 return false;
4520 }
4521
4522 /* Expand calls to inline functions in the body of FN. */
4523
4524 unsigned int
4525 optimize_inline_calls (tree fn)
4526 {
4527 copy_body_data id;
4528 basic_block bb;
4529 int last = n_basic_blocks;
4530 struct gimplify_ctx gctx;
4531 bool inlined_p = false;
4532
4533 /* Clear out ID. */
4534 memset (&id, 0, sizeof (id));
4535
4536 id.src_node = id.dst_node = cgraph_get_node (fn);
4537 gcc_assert (id.dst_node->definition);
4538 id.dst_fn = fn;
4539 /* Or any functions that aren't finished yet. */
4540 if (current_function_decl)
4541 id.dst_fn = current_function_decl;
4542
4543 id.copy_decl = copy_decl_maybe_to_var;
4544 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4545 id.transform_new_cfg = false;
4546 id.transform_return_to_modify = true;
4547 id.transform_parameter = true;
4548 id.transform_lang_insert_block = NULL;
4549 id.statements_to_fold = pointer_set_create ();
4550
4551 push_gimplify_context (&gctx);
4552
4553 /* We make no attempts to keep dominance info up-to-date. */
4554 free_dominance_info (CDI_DOMINATORS);
4555 free_dominance_info (CDI_POST_DOMINATORS);
4556
4557 /* Register specific gimple functions. */
4558 gimple_register_cfg_hooks ();
4559
4560 /* Reach the trees by walking over the CFG, and note the
4561 enclosing basic-blocks in the call edges. */
4562 /* We walk the blocks going forward, because inlined function bodies
4563 will split id->current_basic_block, and the new blocks will
4564 follow it; we'll trudge through them, processing their CALL_EXPRs
4565 along the way. */
4566 FOR_EACH_BB (bb)
4567 inlined_p |= gimple_expand_calls_inline (bb, &id);
4568
4569 pop_gimplify_context (NULL);
4570
4571 #ifdef ENABLE_CHECKING
4572 {
4573 struct cgraph_edge *e;
4574
4575 verify_cgraph_node (id.dst_node);
4576
4577 /* Double check that we inlined everything we are supposed to inline. */
4578 for (e = id.dst_node->callees; e; e = e->next_callee)
4579 gcc_assert (e->inline_failed);
4580 }
4581 #endif
4582
4583 /* Fold queued statements. */
4584 fold_marked_statements (last, id.statements_to_fold);
4585 pointer_set_destroy (id.statements_to_fold);
4586
4587 gcc_assert (!id.debug_stmts.exists ());
4588
4589 /* If we didn't inline into the function there is nothing to do. */
4590 if (!inlined_p)
4591 return 0;
4592
4593 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4594 number_blocks (fn);
4595
4596 delete_unreachable_blocks_update_callgraph (&id);
4597 #ifdef ENABLE_CHECKING
4598 verify_cgraph_node (id.dst_node);
4599 #endif
4600
4601 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4602 not possible yet - the IPA passes might make various functions not
4603 throw and they don't care to proactively update local EH info. This is
4604 done later in the fixup_cfg pass, which also executes the verification. */
4605 return (TODO_update_ssa
4606 | TODO_cleanup_cfg
4607 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4608 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4609 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4610 }
4611
4612 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4613
4614 tree
4615 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4616 {
4617 enum tree_code code = TREE_CODE (*tp);
4618 enum tree_code_class cl = TREE_CODE_CLASS (code);
4619
4620 /* We make copies of most nodes. */
4621 if (IS_EXPR_CODE_CLASS (cl)
4622 || code == TREE_LIST
4623 || code == TREE_VEC
4624 || code == TYPE_DECL
4625 || code == OMP_CLAUSE)
4626 {
4627 /* Because the chain gets clobbered when we make a copy, we save it
4628 here. */
4629 tree chain = NULL_TREE, new_tree;
4630
4631 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4632 chain = TREE_CHAIN (*tp);
4633
4634 /* Copy the node. */
4635 new_tree = copy_node (*tp);
4636
4637 *tp = new_tree;
4638
4639 /* Now, restore the chain, if appropriate. That will cause
4640 walk_tree to walk into the chain as well. */
4641 if (code == PARM_DECL
4642 || code == TREE_LIST
4643 || code == OMP_CLAUSE)
4644 TREE_CHAIN (*tp) = chain;
4645
4646 /* For now, we don't update BLOCKs when we make copies. So, we
4647 have to nullify all BIND_EXPRs. */
4648 if (TREE_CODE (*tp) == BIND_EXPR)
4649 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4650 }
4651 else if (code == CONSTRUCTOR)
4652 {
4653 /* CONSTRUCTOR nodes need special handling because
4654 we need to duplicate the vector of elements. */
4655 tree new_tree;
4656
4657 new_tree = copy_node (*tp);
4658 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
4659 *tp = new_tree;
4660 }
4661 else if (code == STATEMENT_LIST)
4662 /* We used to just abort on STATEMENT_LIST, but we can run into them
4663 with statement-expressions (c++/40975). */
4664 copy_statement_list (tp);
4665 else if (TREE_CODE_CLASS (code) == tcc_type)
4666 *walk_subtrees = 0;
4667 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4668 *walk_subtrees = 0;
4669 else if (TREE_CODE_CLASS (code) == tcc_constant)
4670 *walk_subtrees = 0;
4671 return NULL_TREE;
4672 }
4673
4674 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4675 information indicating to what new SAVE_EXPR this one should be mapped,
4676 use that one. Otherwise, create a new node and enter it in ST. FN is
4677 the function into which the copy will be placed. */
4678
4679 static void
4680 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4681 {
4682 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4683 tree *n;
4684 tree t;
4685
4686 /* See if we already encountered this SAVE_EXPR. */
4687 n = (tree *) pointer_map_contains (st, *tp);
4688
4689 /* If we didn't already remap this SAVE_EXPR, do so now. */
4690 if (!n)
4691 {
4692 t = copy_node (*tp);
4693
4694 /* Remember this SAVE_EXPR. */
4695 *pointer_map_insert (st, *tp) = t;
4696 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4697 *pointer_map_insert (st, t) = t;
4698 }
4699 else
4700 {
4701 /* We've already walked into this SAVE_EXPR; don't do it again. */
4702 *walk_subtrees = 0;
4703 t = *n;
4704 }
4705
4706 /* Replace this SAVE_EXPR with the copy. */
4707 *tp = t;
4708 }
4709
4710 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4711 label, copies the declaration and enters it in the decl map in DATA
4712 (which is really a 'copy_body_data *'). */
4713
4714 static tree
4715 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4716 bool *handled_ops_p ATTRIBUTE_UNUSED,
4717 struct walk_stmt_info *wi)
4718 {
4719 copy_body_data *id = (copy_body_data *) wi->info;
4720 gimple stmt = gsi_stmt (*gsip);
4721
4722 if (gimple_code (stmt) == GIMPLE_LABEL)
4723 {
4724 tree decl = gimple_label_label (stmt);
4725
4726 /* Copy the decl and remember the copy. */
4727 insert_decl_map (id, decl, id->copy_decl (decl, id));
4728 }
4729
4730 return NULL_TREE;
4731 }
4732
4733
4734 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4735 Using the decl map pointed to by ST (a 'struct pointer_map_t *'),
4736 remaps all local declarations to appropriate replacements in gimple
4737 operands. */
4738
4739 static tree
4740 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4741 {
4742 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4743 copy_body_data *id = (copy_body_data *) wi->info;
4744 struct pointer_map_t *st = id->decl_map;
4745 tree *n;
4746 tree expr = *tp;
4747
4748 /* Only a local declaration (variable or label). */
4749 if ((TREE_CODE (expr) == VAR_DECL
4750 && !TREE_STATIC (expr))
4751 || TREE_CODE (expr) == LABEL_DECL)
4752 {
4753 /* Lookup the declaration. */
4754 n = (tree *) pointer_map_contains (st, expr);
4755
4756 /* If it's there, remap it. */
4757 if (n)
4758 *tp = *n;
4759 *walk_subtrees = 0;
4760 }
4761 else if (TREE_CODE (expr) == STATEMENT_LIST
4762 || TREE_CODE (expr) == BIND_EXPR
4763 || TREE_CODE (expr) == SAVE_EXPR)
4764 gcc_unreachable ();
4765 else if (TREE_CODE (expr) == TARGET_EXPR)
4766 {
4767 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4768 It's OK for this to happen if it was part of a subtree that
4769 isn't immediately expanded, such as operand 2 of another
4770 TARGET_EXPR. */
4771 if (!TREE_OPERAND (expr, 1))
4772 {
4773 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4774 TREE_OPERAND (expr, 3) = NULL_TREE;
4775 }
4776 }
4777
4778 /* Keep iterating. */
4779 return NULL_TREE;
4780 }
4781
4782
4783 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4784 Using the decl map in the copy_body_data passed via WI->info,
4785 remaps all local declarations to appropriate replacements in gimple
4786 statements. */
4787
4788 static tree
4789 replace_locals_stmt (gimple_stmt_iterator *gsip,
4790 bool *handled_ops_p ATTRIBUTE_UNUSED,
4791 struct walk_stmt_info *wi)
4792 {
4793 copy_body_data *id = (copy_body_data *) wi->info;
4794 gimple stmt = gsi_stmt (*gsip);
4795
4796 if (gimple_code (stmt) == GIMPLE_BIND)
4797 {
4798 tree block = gimple_bind_block (stmt);
4799
4800 if (block)
4801 {
4802 remap_block (&block, id);
4803 gimple_bind_set_block (stmt, block);
4804 }
4805
4806 /* This will remap a lot of the same decls again, but this should be
4807 harmless. */
4808 if (gimple_bind_vars (stmt))
4809 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
4810 NULL, id));
4811 }
4812
4813 /* Keep iterating. */
4814 return NULL_TREE;
4815 }
4816
4817
4818 /* Copies everything in SEQ and replaces variables and labels local to
4819 current_function_decl. */
4820
4821 gimple_seq
4822 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4823 {
4824 copy_body_data id;
4825 struct walk_stmt_info wi;
4826 struct pointer_set_t *visited;
4827 gimple_seq copy;
4828
4829 /* There's nothing to do for NULL_TREE. */
4830 if (seq == NULL)
4831 return seq;
4832
4833 /* Set up ID. */
4834 memset (&id, 0, sizeof (id));
4835 id.src_fn = current_function_decl;
4836 id.dst_fn = current_function_decl;
4837 id.decl_map = pointer_map_create ();
4838 id.debug_map = NULL;
4839
4840 id.copy_decl = copy_decl_no_change;
4841 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4842 id.transform_new_cfg = false;
4843 id.transform_return_to_modify = false;
4844 id.transform_parameter = false;
4845 id.transform_lang_insert_block = NULL;
4846
4847 /* Walk the tree once to find local labels. */
4848 memset (&wi, 0, sizeof (wi));
4849 visited = pointer_set_create ();
4850 wi.info = &id;
4851 wi.pset = visited;
4852 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4853 pointer_set_destroy (visited);
4854
4855 copy = gimple_seq_copy (seq);
4856
4857 /* Walk the copy, remapping decls. */
4858 memset (&wi, 0, sizeof (wi));
4859 wi.info = &id;
4860 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4861
4862 /* Clean up. */
4863 pointer_map_destroy (id.decl_map);
4864 if (id.debug_map)
4865 pointer_map_destroy (id.debug_map);
4866
4867 return copy;
4868 }
4869
4870
4871 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4872
4873 static tree
4874 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4875 {
4876 if (*tp == data)
4877 return (tree) data;
4878 else
4879 return NULL;
4880 }
4881
4882 DEBUG_FUNCTION bool
4883 debug_find_tree (tree top, tree search)
4884 {
4885 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4886 }
4887
4888
4889 /* Declare the variables created by the inliner. Add all the variables in
4890 VARS to BLOCK and register them as locals of the current function. */
4891
4892 static void
4893 declare_inline_vars (tree block, tree vars)
4894 {
4895 tree t;
4896 for (t = vars; t; t = DECL_CHAIN (t))
4897 {
4898 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4899 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4900 add_local_decl (cfun, t);
4901 }
4902
4903 if (block)
4904 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4905 }
4906
4907 /* Finish up the copying of DECL to COPY. DECL originally lived in
4908 ID->src_fn, but the copy will live in ID->dst_fn. Sets up debug info,
4909 abstract origin and the context of the copy. */
4910
4911 static tree
4912 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4913 {
4914 /* Don't generate debug information for the copy if we wouldn't have
4915 generated it for the original either. */
4916 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4917 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4918
4919 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4920 declaration inspired this copy. */
4921 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4922
4923 /* The new variable/label has no RTL, yet. */
4924 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4925 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4926 SET_DECL_RTL (copy, 0);
4927
4928 /* These args would always appear unused, if not for this. */
4929 TREE_USED (copy) = 1;
4930
4931 /* Set the context for the new declaration. */
4932 if (!DECL_CONTEXT (decl))
4933 /* Globals stay global. */
4934 ;
4935 else if (DECL_CONTEXT (decl) != id->src_fn)
4936 /* Things that weren't in the scope of the function we're inlining
4937 from aren't in the scope we're inlining to, either. */
4938 ;
4939 else if (TREE_STATIC (decl))
4940 /* Function-scoped static variables should stay in the original
4941 function. */
4942 ;
4943 else
4944 /* Ordinary automatic local variables are now in the scope of the
4945 new function. */
4946 DECL_CONTEXT (copy) = id->dst_fn;
4947
4948 return copy;
4949 }
4950
4951 static tree
4952 copy_decl_to_var (tree decl, copy_body_data *id)
4953 {
4954 tree copy, type;
4955
4956 gcc_assert (TREE_CODE (decl) == PARM_DECL
4957 || TREE_CODE (decl) == RESULT_DECL);
4958
4959 type = TREE_TYPE (decl);
4960
4961 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4962 VAR_DECL, DECL_NAME (decl), type);
4963 if (DECL_PT_UID_SET_P (decl))
4964 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4965 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4966 TREE_READONLY (copy) = TREE_READONLY (decl);
4967 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4968 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4969
4970 return copy_decl_for_dup_finish (id, decl, copy);
4971 }
4972
4973 /* Like copy_decl_to_var, but create a return slot object instead of a
4974 pointer variable for return by invisible reference. */
4975
4976 static tree
4977 copy_result_decl_to_var (tree decl, copy_body_data *id)
4978 {
4979 tree copy, type;
4980
4981 gcc_assert (TREE_CODE (decl) == PARM_DECL
4982 || TREE_CODE (decl) == RESULT_DECL);
4983
4984 type = TREE_TYPE (decl);
4985 if (DECL_BY_REFERENCE (decl))
4986 type = TREE_TYPE (type);
4987
4988 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4989 VAR_DECL, DECL_NAME (decl), type);
4990 if (DECL_PT_UID_SET_P (decl))
4991 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4992 TREE_READONLY (copy) = TREE_READONLY (decl);
4993 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4994 if (!DECL_BY_REFERENCE (decl))
4995 {
4996 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4997 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4998 }
4999
5000 return copy_decl_for_dup_finish (id, decl, copy);
5001 }
5002
5003 tree
5004 copy_decl_no_change (tree decl, copy_body_data *id)
5005 {
5006 tree copy;
5007
5008 copy = copy_node (decl);
5009
5010 /* The COPY is not abstract; it will be generated in DST_FN. */
5011 DECL_ABSTRACT (copy) = 0;
5012 lang_hooks.dup_lang_specific_decl (copy);
5013
5014 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5015 been taken; it's for internal bookkeeping in expand_goto_internal. */
5016 if (TREE_CODE (copy) == LABEL_DECL)
5017 {
5018 TREE_ADDRESSABLE (copy) = 0;
5019 LABEL_DECL_UID (copy) = -1;
5020 }
5021
5022 return copy_decl_for_dup_finish (id, decl, copy);
5023 }
5024
5025 static tree
5026 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5027 {
5028 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5029 return copy_decl_to_var (decl, id);
5030 else
5031 return copy_decl_no_change (decl, id);
5032 }
5033
5034 /* Return a copy of the function's argument tree. */
5035 static tree
5036 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5037 bitmap args_to_skip, tree *vars)
5038 {
5039 tree arg, *parg;
5040 tree new_parm = NULL;
5041 int i = 0;
5042
5043 parg = &new_parm;
5044
5045 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5046 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5047 {
5048 tree new_tree = remap_decl (arg, id);
5049 if (TREE_CODE (new_tree) != PARM_DECL)
5050 new_tree = id->copy_decl (arg, id);
5051 lang_hooks.dup_lang_specific_decl (new_tree);
5052 *parg = new_tree;
5053 parg = &DECL_CHAIN (new_tree);
5054 }
5055 else if (!pointer_map_contains (id->decl_map, arg))
5056 {
5057 /* Make an equivalent VAR_DECL. If the argument was used
5058 as a temporary variable later in the function, the uses will be
5059 replaced by the local variable. */
5060 tree var = copy_decl_to_var (arg, id);
5061 insert_decl_map (id, arg, var);
5062 /* Declare this new variable. */
5063 DECL_CHAIN (var) = *vars;
5064 *vars = var;
5065 }
5066 return new_parm;
5067 }
5068
5069 /* Return a copy of the function's static chain. */
5070 static tree
5071 copy_static_chain (tree static_chain, copy_body_data * id)
5072 {
5073 tree *chain_copy, *pvar;
5074
5075 chain_copy = &static_chain;
5076 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5077 {
5078 tree new_tree = remap_decl (*pvar, id);
5079 lang_hooks.dup_lang_specific_decl (new_tree);
5080 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5081 *pvar = new_tree;
5082 }
5083 return static_chain;
5084 }
5085
5086 /* Return true if the function is allowed to be versioned.
5087 This is a guard for the versioning functionality. */
5088
5089 bool
5090 tree_versionable_function_p (tree fndecl)
5091 {
5092 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5093 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5094 }
5095
5096 /* Delete all unreachable basic blocks and update callgraph.
5097 Doing so is somewhat nontrivial because we need to update all clones and
5098 remove inline functions that become unreachable. */
5099
5100 static bool
5101 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5102 {
5103 bool changed = false;
5104 basic_block b, next_bb;
5105
5106 find_unreachable_blocks ();
5107
5108 /* Delete all unreachable basic blocks. */
5109
5110 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
5111 {
5112 next_bb = b->next_bb;
5113
5114 if (!(b->flags & BB_REACHABLE))
5115 {
5116 gimple_stmt_iterator bsi;
5117
5118 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5119 {
5120 struct cgraph_edge *e;
5121 struct cgraph_node *node;
5122
5123 ipa_remove_stmt_references (id->dst_node, gsi_stmt (bsi));
5124
5125 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5126 &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
5127 {
5128 if (!e->inline_failed)
5129 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5130 else
5131 cgraph_remove_edge (e);
5132 }
5133 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5134 && id->dst_node->clones)
5135 for (node = id->dst_node->clones; node != id->dst_node;)
5136 {
5137 ipa_remove_stmt_references (node, gsi_stmt (bsi));
5138 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5139 && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
5140 {
5141 if (!e->inline_failed)
5142 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
5143 else
5144 cgraph_remove_edge (e);
5145 }
5146
5147 if (node->clones)
5148 node = node->clones;
5149 else if (node->next_sibling_clone)
5150 node = node->next_sibling_clone;
5151 else
5152 {
5153 while (node != id->dst_node && !node->next_sibling_clone)
5154 node = node->clone_of;
5155 if (node != id->dst_node)
5156 node = node->next_sibling_clone;
5157 }
5158 }
5159 }
5160 delete_basic_block (b);
5161 changed = true;
5162 }
5163 }
5164
5165 return changed;
5166 }
5167
5168 /* Update clone info after duplication. */
5169
5170 static void
5171 update_clone_info (copy_body_data * id)
5172 {
5173 struct cgraph_node *node;
5174 if (!id->dst_node->clones)
5175 return;
5176 for (node = id->dst_node->clones; node != id->dst_node;)
5177 {
5178 /* First update replace maps to match the new body. */
5179 if (node->clone.tree_map)
5180 {
5181 unsigned int i;
5182 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5183 {
5184 struct ipa_replace_map *replace_info;
5185 replace_info = (*node->clone.tree_map)[i];
5186 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5187 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5188 }
5189 }
5190 if (node->clones)
5191 node = node->clones;
5192 else if (node->next_sibling_clone)
5193 node = node->next_sibling_clone;
5194 else
5195 {
5196 while (node != id->dst_node && !node->next_sibling_clone)
5197 node = node->clone_of;
5198 if (node != id->dst_node)
5199 node = node->next_sibling_clone;
5200 }
5201 }
5202 }
5203
5204 /* Create a copy of a function's tree.
5205 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5206 of the original function and the new copied function
5207 respectively. In case we want to replace a DECL
5208 tree with another tree while duplicating the function's
5209 body, TREE_MAP represents the mapping between these
5210 trees. If UPDATE_CLONES is set, the call_stmt fields
5211 of edges of clones of the function will be updated.
5212
5213 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
5214 from the new version.
5215 If SKIP_RETURN is true, the new version will return void.
5216 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5217 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5218 */
5219 void
5220 tree_function_versioning (tree old_decl, tree new_decl,
5221 vec<ipa_replace_map_p, va_gc> *tree_map,
5222 bool update_clones, bitmap args_to_skip,
5223 bool skip_return, bitmap blocks_to_copy,
5224 basic_block new_entry)
5225 {
5226 struct cgraph_node *old_version_node;
5227 struct cgraph_node *new_version_node;
5228 copy_body_data id;
5229 tree p;
5230 unsigned i;
5231 struct ipa_replace_map *replace_info;
5232 basic_block old_entry_block, bb;
5233 stack_vec<gimple, 10> init_stmts;
5234 tree vars = NULL_TREE;
5235
5236 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5237 && TREE_CODE (new_decl) == FUNCTION_DECL);
5238 DECL_POSSIBLY_INLINED (old_decl) = 1;
5239
5240 old_version_node = cgraph_get_node (old_decl);
5241 gcc_checking_assert (old_version_node);
5242 new_version_node = cgraph_get_node (new_decl);
5243 gcc_checking_assert (new_version_node);
5244
5245 /* Copy over debug args. */
5246 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5247 {
5248 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5249 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5250 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5251 old_debug_args = decl_debug_args_lookup (old_decl);
5252 if (old_debug_args)
5253 {
5254 new_debug_args = decl_debug_args_insert (new_decl);
5255 *new_debug_args = vec_safe_copy (*old_debug_args);
5256 }
5257 }
5258
5259 /* Output the inlining info for this abstract function, since it has been
5260 inlined. If we don't do this now, we can lose the information about the
5261 variables in the function when the blocks get blown away as soon as we
5262 remove the cgraph node. */
5263 (*debug_hooks->outlining_inline_function) (old_decl);
5264
5265 DECL_ARTIFICIAL (new_decl) = 1;
5266 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5267 if (DECL_ORIGIN (old_decl) == old_decl)
5268 old_version_node->used_as_abstract_origin = true;
5269 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5270
5271 /* Prepare the data structures for the tree copy. */
5272 memset (&id, 0, sizeof (id));
5273
5274 /* Generate a new name for the new version. */
5275 id.statements_to_fold = pointer_set_create ();
5276
5277 id.decl_map = pointer_map_create ();
5278 id.debug_map = NULL;
5279 id.src_fn = old_decl;
5280 id.dst_fn = new_decl;
5281 id.src_node = old_version_node;
5282 id.dst_node = new_version_node;
5283 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5284 id.blocks_to_copy = blocks_to_copy;
5285 if (id.src_node->ipa_transforms_to_apply.exists ())
5286 {
5287 vec<ipa_opt_pass> old_transforms_to_apply
5288 = id.dst_node->ipa_transforms_to_apply;
5289 unsigned int i;
5290
5291 id.dst_node->ipa_transforms_to_apply
5292 = id.src_node->ipa_transforms_to_apply.copy ();
5293 for (i = 0; i < old_transforms_to_apply.length (); i++)
5294 id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
5295 old_transforms_to_apply.release ();
5296 }
5297
5298 id.copy_decl = copy_decl_no_change;
5299 id.transform_call_graph_edges
5300 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5301 id.transform_new_cfg = true;
5302 id.transform_return_to_modify = false;
5303 id.transform_parameter = false;
5304 id.transform_lang_insert_block = NULL;
5305
5306 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5307 (DECL_STRUCT_FUNCTION (old_decl));
5308 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5309 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5310 initialize_cfun (new_decl, old_decl,
5311 old_entry_block->count);
5312 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5313 = id.src_cfun->gimple_df->ipa_pta;
5314
5315 /* Copy the function's static chain. */
5316 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5317 if (p)
5318 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5319 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5320 &id);
5321
5322 /* If there's a tree_map, prepare for substitution. */
5323 if (tree_map)
5324 for (i = 0; i < tree_map->length (); i++)
5325 {
5326 gimple init;
5327 replace_info = (*tree_map)[i];
5328 if (replace_info->replace_p)
5329 {
5330 if (!replace_info->old_tree)
5331 {
5332 int i = replace_info->parm_num;
5333 tree parm;
5334 tree req_type;
5335
5336 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5337 i--;
5338 replace_info->old_tree = parm;
5339 req_type = TREE_TYPE (parm);
5340 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5341 {
5342 if (fold_convertible_p (req_type, replace_info->new_tree))
5343 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5344 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5345 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5346 else
5347 {
5348 if (dump_file)
5349 {
5350 fprintf (dump_file, " const ");
5351 print_generic_expr (dump_file, replace_info->new_tree, 0);
5352 fprintf (dump_file, " can't be converted to param ");
5353 print_generic_expr (dump_file, parm, 0);
5354 fprintf (dump_file, "\n");
5355 }
5356 replace_info->old_tree = NULL;
5357 }
5358 }
5359 }
5360 else
5361 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5362 if (replace_info->old_tree)
5363 {
5364 init = setup_one_parameter (&id, replace_info->old_tree,
5365 replace_info->new_tree, id.src_fn,
5366 NULL,
5367 &vars);
5368 if (init)
5369 init_stmts.safe_push (init);
5370 }
5371 }
5372 }
5373 /* Copy the function's arguments. */
5374 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5375 DECL_ARGUMENTS (new_decl) =
5376 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5377 args_to_skip, &vars);
5378
5379 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5380 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5381
5382 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5383
5384 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5385 /* Add local vars. */
5386 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5387
5388 if (DECL_RESULT (old_decl) == NULL_TREE)
5389 ;
5390 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5391 {
5392 DECL_RESULT (new_decl)
5393 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5394 RESULT_DECL, NULL_TREE, void_type_node);
5395 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5396 cfun->returns_struct = 0;
5397 cfun->returns_pcc_struct = 0;
5398 }
5399 else
5400 {
5401 tree old_name;
5402 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5403 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5404 if (gimple_in_ssa_p (id.src_cfun)
5405 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5406 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5407 {
5408 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5409 insert_decl_map (&id, old_name, new_name);
5410 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5411 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5412 }
5413 }
5414
5415 /* Set up the destination function's loop tree. */
5416 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5417 {
5418 cfun->curr_properties &= ~PROP_loops;
5419 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5420 cfun->curr_properties |= PROP_loops;
5421 }
5422
5423 /* Copy the function's body. */
5424 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5425 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
5426
5427 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5428 number_blocks (new_decl);
5429
5430 /* We want to create the BB unconditionally, so that the addition of
5431 debug stmts doesn't affect BB count, which may in the end cause
5432 codegen differences. */
5433 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5434 while (init_stmts.length ())
5435 insert_init_stmt (&id, bb, init_stmts.pop ());
5436 update_clone_info (&id);
5437
5438 /* Remap the nonlocal_goto_save_area, if any. */
5439 if (cfun->nonlocal_goto_save_area)
5440 {
5441 struct walk_stmt_info wi;
5442
5443 memset (&wi, 0, sizeof (wi));
5444 wi.info = &id;
5445 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5446 }
5447
5448 /* Clean up. */
5449 pointer_map_destroy (id.decl_map);
5450 if (id.debug_map)
5451 pointer_map_destroy (id.debug_map);
5452 free_dominance_info (CDI_DOMINATORS);
5453 free_dominance_info (CDI_POST_DOMINATORS);
5454
5455 fold_marked_statements (0, id.statements_to_fold);
5456 pointer_set_destroy (id.statements_to_fold);
5457 fold_cond_expr_cond ();
5458 delete_unreachable_blocks_update_callgraph (&id);
5459 if (id.dst_node->definition)
5460 cgraph_rebuild_references ();
5461 update_ssa (TODO_update_ssa);
5462
5463 /* After partial cloning we need to rescale frequencies so that they are
5464 within the proper range in the cloned function. */
5465 if (new_entry)
5466 {
5467 struct cgraph_edge *e;
5468 rebuild_frequencies ();
5469
5470 new_version_node->count = ENTRY_BLOCK_PTR->count;
5471 for (e = new_version_node->callees; e; e = e->next_callee)
5472 {
5473 basic_block bb = gimple_bb (e->call_stmt);
5474 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5475 bb);
5476 e->count = bb->count;
5477 }
5478 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5479 {
5480 basic_block bb = gimple_bb (e->call_stmt);
5481 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5482 bb);
5483 e->count = bb->count;
5484 }
5485 }
5486
5487 free_dominance_info (CDI_DOMINATORS);
5488 free_dominance_info (CDI_POST_DOMINATORS);
5489
5490 gcc_assert (!id.debug_stmts.exists ());
5491 pop_cfun ();
5492 return;
5493 }
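
/* Illustrative sketch, not part of the original source: in
   tree_function_versioning, a set bit in ARGS_TO_SKIP marks a parameter
   position of OLD_DECL that the new version drops, and
   copy_arguments_for_versioning rebuilds DECL_ARGUMENTS without those
   parameters.  The standalone analogue below filters a plain array of
   parameter names, using an unsigned long mask in place of the bitmap;
   toy_keep_unskipped_params, skip_mask and the other names here are
   hypothetical.  */

static unsigned
toy_keep_unskipped_params (const char *const *old_params, unsigned n_params,
                           unsigned long skip_mask, const char **new_params)
{
  unsigned i, n_kept = 0;

  for (i = 0; i < n_params; i++)
    /* A set bit at position I plays the role of testing ARGS_TO_SKIP
       for parameter I; parameters with clear bits are kept in order.  */
    if (!(skip_mask & (1UL << i)))
      new_params[n_kept++] = old_params[i];

  return n_kept;
}

/* For example, with old_params = { "a", "b", "c" } and skip_mask = 0x2,
   the new list is { "a", "c" }, mirroring a clone that drops its second
   parameter.  */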
5494
5495 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5496 the callee and return the inlined body on success. */
5497
5498 tree
5499 maybe_inline_call_in_expr (tree exp)
5500 {
5501 tree fn = get_callee_fndecl (exp);
5502
5503 /* We can only try to inline "const" functions. */
5504 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5505 {
5506 struct pointer_map_t *decl_map = pointer_map_create ();
5507 call_expr_arg_iterator iter;
5508 copy_body_data id;
5509 tree param, arg, t;
5510
5511 /* Remap the parameters. */
5512 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5513 param;
5514 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5515 *pointer_map_insert (decl_map, param) = arg;
5516
5517 memset (&id, 0, sizeof (id));
5518 id.src_fn = fn;
5519 id.dst_fn = current_function_decl;
5520 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5521 id.decl_map = decl_map;
5522
5523 id.copy_decl = copy_decl_no_change;
5524 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5525 id.transform_new_cfg = false;
5526 id.transform_return_to_modify = true;
5527 id.transform_parameter = true;
5528 id.transform_lang_insert_block = NULL;
5529
5530 /* Make sure not to unshare trees behind the front-end's back
5531 since front-end specific mechanisms may rely on sharing. */
5532 id.regimplify = false;
5533 id.do_not_unshare = true;
5534
5535 /* We're not inside any EH region. */
5536 id.eh_lp_nr = 0;
5537
5538 t = copy_tree_body (&id);
5539 pointer_map_destroy (decl_map);
5540
5541 /* We can only return something suitable for use in a GENERIC
5542 expression tree. */
5543 if (TREE_CODE (t) == MODIFY_EXPR)
5544 return TREE_OPERAND (t, 1);
5545 }
5546
5547 return NULL_TREE;
5548 }
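
/* Illustrative sketch, not part of the original source:
   maybe_inline_call_in_expr works by mapping each PARM_DECL of the
   callee to the matching CALL_EXPR argument and copying the callee body
   with that map applied, keeping only the value of the resulting
   MODIFY_EXPR.  The standalone analogue below substitutes argument
   values for parameter slots while evaluating a toy expression tree;
   the toy_expr type and every name around it are hypothetical.  */

enum toy_expr_kind { TOY_CONST, TOY_PARM, TOY_PLUS };

struct toy_expr
{
  enum toy_expr_kind kind;
  int value;                         /* Constant value, or parameter index.  */
  const struct toy_expr *op0, *op1;  /* Operands of TOY_PLUS.  */
};

static int
toy_eval_with_args (const struct toy_expr *e, const int *args)
{
  switch (e->kind)
    {
    case TOY_CONST:
      return e->value;
    case TOY_PARM:
      /* The analogue of looking a parameter up in the decl map built
         from the call's arguments.  */
      return args[e->value];
    case TOY_PLUS:
      return toy_eval_with_args (e->op0, args)
             + toy_eval_with_args (e->op1, args);
    default:
      return 0;
    }
}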
5549
5550 /* Duplicate a type, fields and all. */
5551
5552 tree
5553 build_duplicate_type (tree type)
5554 {
5555 struct copy_body_data id;
5556
5557 memset (&id, 0, sizeof (id));
5558 id.src_fn = current_function_decl;
5559 id.dst_fn = current_function_decl;
5560 id.src_cfun = cfun;
5561 id.decl_map = pointer_map_create ();
5562 id.debug_map = NULL;
5563 id.copy_decl = copy_decl_no_change;
5564
5565 type = remap_type_1 (type, &id);
5566
5567 pointer_map_destroy (id.decl_map);
5568 if (id.debug_map)
5569 pointer_map_destroy (id.debug_map);
5570
5571 TYPE_CANONICAL (type) = type;
5572
5573 return type;
5574 }
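
/* Illustrative sketch, not part of the original source:
   build_duplicate_type hands remap_type_1 a fresh decl_map so that any
   sub-node reached more than once is copied exactly once and sharing is
   preserved in the duplicate.  The standalone analogue below deep-copies
   a toy two-child node structure, with a small memo table standing in
   for the pointer map; toy_copy_node, toy_copy_entry and toy_deep_copy
   are hypothetical names, and the caller is assumed to pass MAP and
   STORAGE arrays large enough for the structure being copied.  */

struct toy_copy_node
{
  const struct toy_copy_node *left, *right;
};

/* One original-to-copy pair, standing in for a pointer map entry.  */
struct toy_copy_entry
{
  const struct toy_copy_node *orig;
  struct toy_copy_node *copy;
};

static struct toy_copy_node *
toy_deep_copy (const struct toy_copy_node *n, struct toy_copy_entry *map,
               unsigned *n_map, struct toy_copy_node *storage,
               unsigned *n_used)
{
  unsigned i;
  struct toy_copy_node *c;

  if (!n)
    return NULL;

  /* The analogue of the pointer map lookup: if N has been copied
     already, reuse that copy so sharing survives duplication.  */
  for (i = 0; i < *n_map; i++)
    if (map[i].orig == n)
      return map[i].copy;

  /* Record the mapping before recursing, much as insert_decl_map does,
     so even self-referential structures terminate.  */
  c = &storage[(*n_used)++];
  map[*n_map].orig = n;
  map[*n_map].copy = c;
  (*n_map)++;

  c->left = toy_deep_copy (n->left, map, n_map, storage, n_used);
  c->right = toy_deep_copy (n->right, map, n_map, storage, n_used);
  return c;
}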