gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
3 2012 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "diagnostic-core.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "flags.h"
30 #include "params.h"
31 #include "input.h"
32 #include "insn-config.h"
33 #include "hashtab.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-iterator.h"
37 #include "cgraph.h"
38 #include "intl.h"
39 #include "tree-mudflap.h"
40 #include "tree-flow.h"
41 #include "function.h"
42 #include "tree-flow.h"
43 #include "tree-pretty-print.h"
44 #include "except.h"
45 #include "debug.h"
46 #include "pointer-set.h"
47 #include "ipa-prop.h"
48 #include "value-prof.h"
49 #include "tree-pass.h"
50 #include "target.h"
51
52 #include "rtl.h" /* FIXME: For asm_str_count. */
53
54 /* I'm not really happy about this, but we need to handle gimple and
55 non-gimple trees. */
56 #include "gimple.h"
57
58 /* Inlining, Cloning, Versioning, Parallelization
59
60 Inlining: a function body is duplicated, but the PARM_DECLs are
61 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
62 MODIFY_EXPRs that store to a dedicated returned-value variable.
63 The duplicated eh_region info of the copy will later be appended
64 to the info for the caller; the eh_region info in copied throwing
65 statements and RESX statements are adjusted accordingly.
66
67 Cloning: (only in C++) We have one body for a con/de/structor, and
68 multiple function decls, each with a unique parameter list.
69 Duplicate the body, using the given splay tree; some parameters
70 will become constants (like 0 or 1).
71
72 Versioning: a function body is duplicated, and the result is a new
73 function rather than being inserted into the blocks of an existing
74 function as with inlining. Some parameters will become constants.
75
76 Parallelization: a region of a function is duplicated resulting in
77 a new function. Variables may be replaced with complex expressions
78 to enable shared variable semantics.
79
80 All of these will simultaneously look up any callgraph edges. If
81 we're going to inline the duplicated function body, and the given
82 function has some cloned callgraph nodes (one for each place this
83 function will be inlined) those callgraph edges will be duplicated.
84 If we're cloning the body, those callgraph edges will be
85 updated to point into the new body. (Note that the original
86 callgraph node and edge list will not be altered.)
87
88 See the CALL_EXPR handling case in copy_tree_body_r (). */
89
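/* As a rough illustration (not drawn from any particular test case),
   inlining

     int inc (int x) { return x + 1; }
     ...
     y = inc (a);

   copies the callee body into the caller, remaps the PARM_DECL x to a
   new local initialized from A, and turns the RETURN_EXPR into an
   assignment to a dedicated return variable, roughly:

     x.1 = a;
     retval.2 = x.1 + 1;
     y = retval.2;

   The temporary names above are made up; any callgraph edges in the
   copied body are duplicated or updated as described above. */
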
90 /* To Do:
91
92 o In order to make inlining-on-trees work, we pessimized
93 function-local static constants. In particular, they are now
94 always output, even when not addressed. Fix this by treating
95 function-local static constants just like global static
96 constants; the back-end already knows not to output them if they
97 are not needed.
98
99 o Provide heuristics to clamp inlining of recursive template
100 calls? */
101
102
103 /* Weights that estimate_num_insns uses to estimate the size of the
104 produced code. */
105
106 eni_weights eni_size_weights;
107
108 /* Weights that estimate_num_insns uses to estimate the time necessary
109 to execute the produced code. */
110
111 eni_weights eni_time_weights;
112
113 /* Prototypes. */
114
115 static tree declare_return_variable (copy_body_data *, tree, tree, basic_block);
116 static void remap_block (tree *, copy_body_data *);
117 static void copy_bind_expr (tree *, int *, copy_body_data *);
118 static tree mark_local_for_remap_r (tree *, int *, void *);
119 static void unsave_expr_1 (tree);
120 static tree unsave_r (tree *, int *, void *);
121 static void declare_inline_vars (tree, tree);
122 static void remap_save_expr (tree *, void *, int *);
123 static void prepend_lexical_block (tree current_block, tree new_block);
124 static tree copy_decl_to_var (tree, copy_body_data *);
125 static tree copy_result_decl_to_var (tree, copy_body_data *);
126 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
127 static gimple remap_gimple_stmt (gimple, copy_body_data *);
128 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
129
130 /* Insert a tree->tree mapping for ID. Although the name suggests
131 that the trees should be variables, it is used for more than that. */
132
133 void
134 insert_decl_map (copy_body_data *id, tree key, tree value)
135 {
136 *pointer_map_insert (id->decl_map, key) = value;
137
138 /* Always insert an identity map as well. If we see this same new
139 node again, we won't want to duplicate it a second time. */
140 if (key != value)
141 *pointer_map_insert (id->decl_map, value) = value;
142 }
143
144 /* Insert a tree->tree mapping for ID. This is only used for
145 variables. */
146
147 static void
148 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
149 {
150 if (!gimple_in_ssa_p (id->src_cfun))
151 return;
152
153 if (!MAY_HAVE_DEBUG_STMTS)
154 return;
155
156 if (!target_for_debug_bind (key))
157 return;
158
159 gcc_assert (TREE_CODE (key) == PARM_DECL);
160 gcc_assert (TREE_CODE (value) == VAR_DECL);
161
162 if (!id->debug_map)
163 id->debug_map = pointer_map_create ();
164
165 *pointer_map_insert (id->debug_map, key) = value;
166 }
167
168 /* If nonzero, we're remapping the contents of inlined debug
169 statements. If negative, an error has occurred, such as a
170 reference to a variable that isn't available in the inlined
171 context. */
172 static int processing_debug_stmt = 0;
173
174 /* Construct new SSA name for old NAME. ID is the inline context. */
175
176 static tree
177 remap_ssa_name (tree name, copy_body_data *id)
178 {
179 tree new_tree;
180 tree *n;
181
182 gcc_assert (TREE_CODE (name) == SSA_NAME);
183
184 n = (tree *) pointer_map_contains (id->decl_map, name);
185 if (n)
186 return unshare_expr (*n);
187
188 if (processing_debug_stmt)
189 {
190 if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
191 && SSA_NAME_IS_DEFAULT_DEF (name)
192 && id->entry_bb == NULL
193 && single_succ_p (ENTRY_BLOCK_PTR))
194 {
195 tree vexpr = make_node (DEBUG_EXPR_DECL);
196 gimple def_temp;
197 gimple_stmt_iterator gsi;
198 tree val = SSA_NAME_VAR (name);
199
200 n = (tree *) pointer_map_contains (id->decl_map, val);
201 if (n != NULL)
202 val = *n;
203 if (TREE_CODE (val) != PARM_DECL)
204 {
205 processing_debug_stmt = -1;
206 return name;
207 }
208 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
209 DECL_ARTIFICIAL (vexpr) = 1;
210 TREE_TYPE (vexpr) = TREE_TYPE (name);
211 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
212 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
213 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
214 return vexpr;
215 }
216
217 processing_debug_stmt = -1;
218 return name;
219 }
220
221 /* Do not set DEF_STMT yet as the statement is not copied yet. We do
222 that in copy_bb. */
223 new_tree = remap_decl (SSA_NAME_VAR (name), id);
224
225 /* We might've substituted a constant or another SSA_NAME for
226 the variable.
227
228 Replace the SSA name representing the RESULT_DECL with the variable
229 during inlining: this saves us from needing to introduce a PHI node
230 in the case where the return value is only partly initialized. */
231 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
232 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
233 || !id->transform_return_to_modify))
234 {
235 struct ptr_info_def *pi;
236 new_tree = make_ssa_name (new_tree, NULL);
237 insert_decl_map (id, name, new_tree);
238 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
239 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
240 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
241 /* At least IPA points-to info can be directly transferred. */
242 if (id->src_cfun->gimple_df
243 && id->src_cfun->gimple_df->ipa_pta
244 && (pi = SSA_NAME_PTR_INFO (name))
245 && !pi->pt.anything)
246 {
247 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
248 new_pi->pt = pi->pt;
249 }
250 if (SSA_NAME_IS_DEFAULT_DEF (name))
251 {
252 /* By inlining a function that has an uninitialized variable, we might
253 extend its lifetime (the variable might get reused). This causes an
254 ICE if we end up extending the lifetime of an SSA name across an
255 abnormal edge, and it also increases register pressure.
256
257 We simply initialize all uninitialized vars to 0, except when we
258 are inlining into the very first BB. We could avoid this for all
259 BBs that are not inside strongly connected regions of the CFG,
260 but that is expensive to test. */
261 if (id->entry_bb
262 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
263 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
264 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
265 || EDGE_COUNT (id->entry_bb->preds) != 1))
266 {
267 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
268 gimple init_stmt;
269 tree zero = build_zero_cst (TREE_TYPE (new_tree));
270
271 init_stmt = gimple_build_assign (new_tree, zero);
272 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
273 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
274 }
275 else
276 {
277 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
278 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
279 }
280 }
281 }
282 else
283 insert_decl_map (id, name, new_tree);
284 return new_tree;
285 }
286
287 /* Remap DECL during the copying of the BLOCK tree for the function. */
288
289 tree
290 remap_decl (tree decl, copy_body_data *id)
291 {
292 tree *n;
293
294 /* We only remap local variables in the current function. */
295
296 /* See if we have remapped this declaration. */
297
298 n = (tree *) pointer_map_contains (id->decl_map, decl);
299
300 if (!n && processing_debug_stmt)
301 {
302 processing_debug_stmt = -1;
303 return decl;
304 }
305
306 /* If we didn't already have an equivalent for this declaration,
307 create one now. */
308 if (!n)
309 {
310 /* Make a copy of the variable or label. */
311 tree t = id->copy_decl (decl, id);
312
313 /* Remember it, so that if we encounter this local entity again
314 we can reuse this copy. Do this early because remap_type may
315 need this decl for TYPE_STUB_DECL. */
316 insert_decl_map (id, decl, t);
317
318 if (!DECL_P (t))
319 return t;
320
321 /* Remap types, if necessary. */
322 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
323 if (TREE_CODE (t) == TYPE_DECL)
324 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
325
326 /* Remap sizes as necessary. */
327 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
328 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
329
330 /* If fields, do likewise for offset and qualifier. */
331 if (TREE_CODE (t) == FIELD_DECL)
332 {
333 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
334 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
335 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
336 }
337
338 return t;
339 }
340
341 if (id->do_not_unshare)
342 return *n;
343 else
344 return unshare_expr (*n);
345 }
346
347 static tree
348 remap_type_1 (tree type, copy_body_data *id)
349 {
350 tree new_tree, t;
351
352 /* We do need a copy. Build and register it now. If this is a pointer or
353 reference type, remap the designated type and make a new pointer or
354 reference type. */
355 if (TREE_CODE (type) == POINTER_TYPE)
356 {
357 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
358 TYPE_MODE (type),
359 TYPE_REF_CAN_ALIAS_ALL (type));
360 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
361 new_tree = build_type_attribute_qual_variant (new_tree,
362 TYPE_ATTRIBUTES (type),
363 TYPE_QUALS (type));
364 insert_decl_map (id, type, new_tree);
365 return new_tree;
366 }
367 else if (TREE_CODE (type) == REFERENCE_TYPE)
368 {
369 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
370 TYPE_MODE (type),
371 TYPE_REF_CAN_ALIAS_ALL (type));
372 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
373 new_tree = build_type_attribute_qual_variant (new_tree,
374 TYPE_ATTRIBUTES (type),
375 TYPE_QUALS (type));
376 insert_decl_map (id, type, new_tree);
377 return new_tree;
378 }
379 else
380 new_tree = copy_node (type);
381
382 insert_decl_map (id, type, new_tree);
383
384 /* This is a new type, not a copy of an old type. Need to reassociate
385 variants. We can handle everything except the main variant lazily. */
386 t = TYPE_MAIN_VARIANT (type);
387 if (type != t)
388 {
389 t = remap_type (t, id);
390 TYPE_MAIN_VARIANT (new_tree) = t;
391 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
392 TYPE_NEXT_VARIANT (t) = new_tree;
393 }
394 else
395 {
396 TYPE_MAIN_VARIANT (new_tree) = new_tree;
397 TYPE_NEXT_VARIANT (new_tree) = NULL;
398 }
399
400 if (TYPE_STUB_DECL (type))
401 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
402
403 /* Lazily create pointer and reference types. */
404 TYPE_POINTER_TO (new_tree) = NULL;
405 TYPE_REFERENCE_TO (new_tree) = NULL;
406
407 switch (TREE_CODE (new_tree))
408 {
409 case INTEGER_TYPE:
410 case REAL_TYPE:
411 case FIXED_POINT_TYPE:
412 case ENUMERAL_TYPE:
413 case BOOLEAN_TYPE:
414 t = TYPE_MIN_VALUE (new_tree);
415 if (t && TREE_CODE (t) != INTEGER_CST)
416 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
417
418 t = TYPE_MAX_VALUE (new_tree);
419 if (t && TREE_CODE (t) != INTEGER_CST)
420 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
421 return new_tree;
422
423 case FUNCTION_TYPE:
424 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
425 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
426 return new_tree;
427
428 case ARRAY_TYPE:
429 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
430 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
431 break;
432
433 case RECORD_TYPE:
434 case UNION_TYPE:
435 case QUAL_UNION_TYPE:
436 {
437 tree f, nf = NULL;
438
439 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
440 {
441 t = remap_decl (f, id);
442 DECL_CONTEXT (t) = new_tree;
443 DECL_CHAIN (t) = nf;
444 nf = t;
445 }
446 TYPE_FIELDS (new_tree) = nreverse (nf);
447 }
448 break;
449
450 case OFFSET_TYPE:
451 default:
452 /* This type shouldn't have been considered variable-sized. */
453 gcc_unreachable ();
454 }
455
456 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
457 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
458
459 return new_tree;
460 }
461
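/* Remap TYPE through ID's decl map. Only variably modified types truly
   need a new copy; for example, a VLA type such as int[n] whose bound N
   refers to a remapped decl must itself be remapped, while every other
   type is simply mapped to itself below. */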
462 tree
463 remap_type (tree type, copy_body_data *id)
464 {
465 tree *node;
466 tree tmp;
467
468 if (type == NULL)
469 return type;
470
471 /* See if we have remapped this type. */
472 node = (tree *) pointer_map_contains (id->decl_map, type);
473 if (node)
474 return *node;
475
476 /* The type only needs remapping if it's variably modified. */
477 if (! variably_modified_type_p (type, id->src_fn))
478 {
479 insert_decl_map (id, type, type);
480 return type;
481 }
482
483 id->remapping_type_depth++;
484 tmp = remap_type_1 (type, id);
485 id->remapping_type_depth--;
486
487 return tmp;
488 }
489
490 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
491
492 static bool
493 can_be_nonlocal (tree decl, copy_body_data *id)
494 {
495 /* We cannot duplicate function decls. */
496 if (TREE_CODE (decl) == FUNCTION_DECL)
497 return true;
498
499 /* Local static vars must be non-local or we get multiple declaration
500 problems. */
501 if (TREE_CODE (decl) == VAR_DECL
502 && !auto_var_in_fn_p (decl, id->src_fn))
503 return true;
504
505 return false;
506 }
507
508 static tree
509 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
510 {
511 tree old_var;
512 tree new_decls = NULL_TREE;
513
514 /* Remap its variables. */
515 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
516 {
517 tree new_var;
518
519 if (can_be_nonlocal (old_var, id))
520 {
521 /* We need to add this variable to the local decls as otherwise
522 nothing else will do so. */
523 if (TREE_CODE (old_var) == VAR_DECL
524 && ! DECL_EXTERNAL (old_var))
525 add_local_decl (cfun, old_var);
526 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
527 && !DECL_IGNORED_P (old_var)
528 && nonlocalized_list)
529 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
530 continue;
531 }
532
533 /* Remap the variable. */
534 new_var = remap_decl (old_var, id);
535
536 /* If we didn't remap this variable, we can't mess with its
537 TREE_CHAIN. If we remapped this variable to the return slot, it's
538 already declared somewhere else, so don't declare it here. */
539
540 if (new_var == id->retvar)
541 ;
542 else if (!new_var)
543 {
544 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
545 && !DECL_IGNORED_P (old_var)
546 && nonlocalized_list)
547 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
548 }
549 else
550 {
551 gcc_assert (DECL_P (new_var));
552 DECL_CHAIN (new_var) = new_decls;
553 new_decls = new_var;
554
555 /* Also copy value-expressions. */
556 if (TREE_CODE (new_var) == VAR_DECL
557 && DECL_HAS_VALUE_EXPR_P (new_var))
558 {
559 tree tem = DECL_VALUE_EXPR (new_var);
560 bool old_regimplify = id->regimplify;
561 id->remapping_type_depth++;
562 walk_tree (&tem, copy_tree_body_r, id, NULL);
563 id->remapping_type_depth--;
564 id->regimplify = old_regimplify;
565 SET_DECL_VALUE_EXPR (new_var, tem);
566 }
567 }
568 }
569
570 return nreverse (new_decls);
571 }
572
573 /* Copy the BLOCK to contain remapped versions of the variables
574 therein. And hook the new block into the block-tree. */
575
576 static void
577 remap_block (tree *block, copy_body_data *id)
578 {
579 tree old_block;
580 tree new_block;
581
582 /* Make the new block. */
583 old_block = *block;
584 new_block = make_node (BLOCK);
585 TREE_USED (new_block) = TREE_USED (old_block);
586 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
587 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
588 BLOCK_NONLOCALIZED_VARS (new_block)
589 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
590 *block = new_block;
591
592 /* Remap its variables. */
593 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
594 &BLOCK_NONLOCALIZED_VARS (new_block),
595 id);
596
597 if (id->transform_lang_insert_block)
598 id->transform_lang_insert_block (new_block);
599
600 /* Remember the remapped block. */
601 insert_decl_map (id, old_block, new_block);
602 }
603
604 /* Copy the whole block tree and root it in id->block. */
605 static tree
606 remap_blocks (tree block, copy_body_data *id)
607 {
608 tree t;
609 tree new_tree = block;
610
611 if (!block)
612 return NULL;
613
614 remap_block (&new_tree, id);
615 gcc_assert (new_tree != block);
616 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
617 prepend_lexical_block (new_tree, remap_blocks (t, id));
618 /* Blocks are in arbitrary order, but make things slightly prettier by
619 not swapping the order when producing a copy. */
620 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
621 return new_tree;
622 }
623
624 static void
625 copy_statement_list (tree *tp)
626 {
627 tree_stmt_iterator oi, ni;
628 tree new_tree;
629
630 new_tree = alloc_stmt_list ();
631 ni = tsi_start (new_tree);
632 oi = tsi_start (*tp);
633 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
634 *tp = new_tree;
635
636 for (; !tsi_end_p (oi); tsi_next (&oi))
637 {
638 tree stmt = tsi_stmt (oi);
639 if (TREE_CODE (stmt) == STATEMENT_LIST)
640 /* This copy is not redundant; tsi_link_after will smash this
641 STATEMENT_LIST into the end of the one we're building, and we
642 don't want to do that with the original. */
643 copy_statement_list (&stmt);
644 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
645 }
646 }
647
648 static void
649 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
650 {
651 tree block = BIND_EXPR_BLOCK (*tp);
652 /* Copy (and replace) the statement. */
653 copy_tree_r (tp, walk_subtrees, NULL);
654 if (block)
655 {
656 remap_block (&block, id);
657 BIND_EXPR_BLOCK (*tp) = block;
658 }
659
660 if (BIND_EXPR_VARS (*tp))
661 /* This will remap a lot of the same decls again, but this should be
662 harmless. */
663 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
664 }
665
666
667 /* Create a new gimple_seq by remapping all the statements in BODY
668 using the inlining information in ID. */
669
670 static gimple_seq
671 remap_gimple_seq (gimple_seq body, copy_body_data *id)
672 {
673 gimple_stmt_iterator si;
674 gimple_seq new_body = NULL;
675
676 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
677 {
678 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
679 gimple_seq_add_stmt (&new_body, new_stmt);
680 }
681
682 return new_body;
683 }
684
685
686 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
687 block using the mapping information in ID. */
688
689 static gimple
690 copy_gimple_bind (gimple stmt, copy_body_data *id)
691 {
692 gimple new_bind;
693 tree new_block, new_vars;
694 gimple_seq body, new_body;
695
696 /* Copy the statement. Note that we purposely don't use copy_stmt
697 here because we need to remap statements as we copy. */
698 body = gimple_bind_body (stmt);
699 new_body = remap_gimple_seq (body, id);
700
701 new_block = gimple_bind_block (stmt);
702 if (new_block)
703 remap_block (&new_block, id);
704
705 /* This will remap a lot of the same decls again, but this should be
706 harmless. */
707 new_vars = gimple_bind_vars (stmt);
708 if (new_vars)
709 new_vars = remap_decls (new_vars, NULL, id);
710
711 new_bind = gimple_build_bind (new_vars, new_body, new_block);
712
713 return new_bind;
714 }
715
716
717 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
718 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
719 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
720 recursing into the child nodes of *TP. */
721
722 static tree
723 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
724 {
725 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
726 copy_body_data *id = (copy_body_data *) wi_p->info;
727 tree fn = id->src_fn;
728
729 if (TREE_CODE (*tp) == SSA_NAME)
730 {
731 *tp = remap_ssa_name (*tp, id);
732 *walk_subtrees = 0;
733 return NULL;
734 }
735 else if (auto_var_in_fn_p (*tp, fn))
736 {
737 /* Local variables and labels need to be replaced by equivalent
738 variables. We don't want to copy static variables; there's
739 only one of those, no matter how many times we inline the
740 containing function. Similarly for globals from an outer
741 function. */
742 tree new_decl;
743
744 /* Remap the declaration. */
745 new_decl = remap_decl (*tp, id);
746 gcc_assert (new_decl);
747 /* Replace this variable with the copy. */
748 STRIP_TYPE_NOPS (new_decl);
749 /* ??? The C++ frontend uses void * pointer zero to initialize
750 any other type. This confuses the middle-end type verification.
751 As cloned bodies do not go through gimplification again, the fixup
752 there doesn't trigger. */
753 if (TREE_CODE (new_decl) == INTEGER_CST
754 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
755 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
756 *tp = new_decl;
757 *walk_subtrees = 0;
758 }
759 else if (TREE_CODE (*tp) == STATEMENT_LIST)
760 gcc_unreachable ();
761 else if (TREE_CODE (*tp) == SAVE_EXPR)
762 gcc_unreachable ();
763 else if (TREE_CODE (*tp) == LABEL_DECL
764 && (!DECL_CONTEXT (*tp)
765 || decl_function_context (*tp) == id->src_fn))
766 /* These may need to be remapped for EH handling. */
767 *tp = remap_decl (*tp, id);
768 else if (TREE_CODE (*tp) == FIELD_DECL)
769 {
770 /* If the enclosing record type is variably_modified_type_p, the field
771 has already been remapped. Otherwise, it need not be. */
772 tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
773 if (n)
774 *tp = *n;
775 *walk_subtrees = 0;
776 }
777 else if (TYPE_P (*tp))
778 /* Types may need remapping as well. */
779 *tp = remap_type (*tp, id);
780 else if (CONSTANT_CLASS_P (*tp))
781 {
782 /* If this is a constant, we have to copy the node iff the type
783 will be remapped. copy_tree_r will not copy a constant. */
784 tree new_type = remap_type (TREE_TYPE (*tp), id);
785
786 if (new_type == TREE_TYPE (*tp))
787 *walk_subtrees = 0;
788
789 else if (TREE_CODE (*tp) == INTEGER_CST)
790 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
791 TREE_INT_CST_HIGH (*tp));
792 else
793 {
794 *tp = copy_node (*tp);
795 TREE_TYPE (*tp) = new_type;
796 }
797 }
798 else
799 {
800 /* Otherwise, just copy the node. Note that copy_tree_r already
801 knows not to copy VAR_DECLs, etc., so this is safe. */
802
803 /* We should never have TREE_BLOCK set on non-statements. */
804 if (EXPR_P (*tp))
805 gcc_assert (!TREE_BLOCK (*tp));
806
807 if (TREE_CODE (*tp) == MEM_REF)
808 {
809 tree ptr = TREE_OPERAND (*tp, 0);
810 tree type = remap_type (TREE_TYPE (*tp), id);
811 tree old = *tp;
812
813 /* We need to re-canonicalize MEM_REFs from inline substitutions
814 that can happen when a pointer argument is an ADDR_EXPR.
815 Recurse here manually to allow that. */
816 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
817 *tp = fold_build2 (MEM_REF, type,
818 ptr, TREE_OPERAND (*tp, 1));
819 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
820 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
821 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
822 *walk_subtrees = 0;
823 return NULL;
824 }
825
826 /* Here is the "usual case". Copy this tree node, and then
827 tweak some special cases. */
828 copy_tree_r (tp, walk_subtrees, NULL);
829
830 if (TREE_CODE (*tp) != OMP_CLAUSE)
831 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
832
833 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
834 {
835 /* The copied TARGET_EXPR has never been expanded, even if the
836 original node was expanded already. */
837 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
838 TREE_OPERAND (*tp, 3) = NULL_TREE;
839 }
840 else if (TREE_CODE (*tp) == ADDR_EXPR)
841 {
842 /* Variable substitution need not be simple; see, in particular,
843 the MEM_REF substitution above. Make sure that
844 TREE_CONSTANT and friends are up to date, but take care
845 not to improperly set TREE_BLOCK on some sub-expressions. */
846 int invariant = is_gimple_min_invariant (*tp);
847 tree block = id->block;
848 id->block = NULL_TREE;
849 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
850 id->block = block;
851 recompute_tree_invariant_for_addr_expr (*tp);
852
853 /* If this used to be invariant, but is not any longer,
854 then regimplification is probably needed. */
855 if (invariant && !is_gimple_min_invariant (*tp))
856 id->regimplify = true;
857
858 *walk_subtrees = 0;
859 }
860 }
861
862 /* Keep iterating. */
863 return NULL_TREE;
864 }
865
866
867 /* Called from copy_body_id via walk_tree. DATA is really a
868 `copy_body_data *'. */
869
870 tree
871 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
872 {
873 copy_body_data *id = (copy_body_data *) data;
874 tree fn = id->src_fn;
875 tree new_block;
876
877 /* Begin by recognizing trees that we'll completely rewrite for the
878 inlining context. Our output for these trees is completely
879 different from our input (e.g. RETURN_EXPR is deleted, and morphs
880 into an edge). Further down, we'll handle trees that get
881 duplicated and/or tweaked. */
882
883 /* When requested, RETURN_EXPRs should be transformed to just the
884 contained MODIFY_EXPR. The branch semantics of the return will
885 be handled elsewhere by manipulating the CFG rather than a statement. */
886 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
887 {
888 tree assignment = TREE_OPERAND (*tp, 0);
889
890 /* If we're returning something, just turn that into an
891 assignment into the equivalent of the original RESULT_DECL.
892 If the "assignment" is just the result decl, the result
893 decl has already been set (e.g. a recent "foo (&result_decl,
894 ...)"); just toss the entire RETURN_EXPR. */
895 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
896 {
897 /* Replace the RETURN_EXPR with (a copy of) the
898 MODIFY_EXPR hanging underneath. */
899 *tp = copy_node (assignment);
900 }
901 else /* Else the RETURN_EXPR returns no value. */
902 {
903 *tp = NULL;
904 return (tree) (void *)1;
905 }
906 }
907 else if (TREE_CODE (*tp) == SSA_NAME)
908 {
909 *tp = remap_ssa_name (*tp, id);
910 *walk_subtrees = 0;
911 return NULL;
912 }
913
914 /* Local variables and labels need to be replaced by equivalent
915 variables. We don't want to copy static variables; there's only
916 one of those, no matter how many times we inline the containing
917 function. Similarly for globals from an outer function. */
918 else if (auto_var_in_fn_p (*tp, fn))
919 {
920 tree new_decl;
921
922 /* Remap the declaration. */
923 new_decl = remap_decl (*tp, id);
924 gcc_assert (new_decl);
925 /* Replace this variable with the copy. */
926 STRIP_TYPE_NOPS (new_decl);
927 *tp = new_decl;
928 *walk_subtrees = 0;
929 }
930 else if (TREE_CODE (*tp) == STATEMENT_LIST)
931 copy_statement_list (tp);
932 else if (TREE_CODE (*tp) == SAVE_EXPR
933 || TREE_CODE (*tp) == TARGET_EXPR)
934 remap_save_expr (tp, id->decl_map, walk_subtrees);
935 else if (TREE_CODE (*tp) == LABEL_DECL
936 && (! DECL_CONTEXT (*tp)
937 || decl_function_context (*tp) == id->src_fn))
938 /* These may need to be remapped for EH handling. */
939 *tp = remap_decl (*tp, id);
940 else if (TREE_CODE (*tp) == BIND_EXPR)
941 copy_bind_expr (tp, walk_subtrees, id);
942 /* Types may need remapping as well. */
943 else if (TYPE_P (*tp))
944 *tp = remap_type (*tp, id);
945
946 /* If this is a constant, we have to copy the node iff the type will be
947 remapped. copy_tree_r will not copy a constant. */
948 else if (CONSTANT_CLASS_P (*tp))
949 {
950 tree new_type = remap_type (TREE_TYPE (*tp), id);
951
952 if (new_type == TREE_TYPE (*tp))
953 *walk_subtrees = 0;
954
955 else if (TREE_CODE (*tp) == INTEGER_CST)
956 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
957 TREE_INT_CST_HIGH (*tp));
958 else
959 {
960 *tp = copy_node (*tp);
961 TREE_TYPE (*tp) = new_type;
962 }
963 }
964
965 /* Otherwise, just copy the node. Note that copy_tree_r already
966 knows not to copy VAR_DECLs, etc., so this is safe. */
967 else
968 {
969 /* Here we handle trees that are not completely rewritten.
970 First we detect some inlining-induced bogosities for
971 discarding. */
972 if (TREE_CODE (*tp) == MODIFY_EXPR
973 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
974 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
975 {
976 /* Some assignments VAR = VAR; don't generate any rtl code
977 and thus don't count as variable modification. Avoid
978 keeping bogosities like 0 = 0. */
979 tree decl = TREE_OPERAND (*tp, 0), value;
980 tree *n;
981
982 n = (tree *) pointer_map_contains (id->decl_map, decl);
983 if (n)
984 {
985 value = *n;
986 STRIP_TYPE_NOPS (value);
987 if (TREE_CONSTANT (value) || TREE_READONLY (value))
988 {
989 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
990 return copy_tree_body_r (tp, walk_subtrees, data);
991 }
992 }
993 }
994 else if (TREE_CODE (*tp) == INDIRECT_REF)
995 {
996 /* Get rid of *& from inline substitutions that can happen when a
997 pointer argument is an ADDR_EXPR. */
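/* For example, if the callee dereferences its pointer parameter P and
   the caller passed &x, plain substitution would leave "*&x" in the IL;
   the code below folds that back to "x" when the types allow it
   (P and x are illustrative names). */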
998 tree decl = TREE_OPERAND (*tp, 0);
999 tree *n;
1000
1001 n = (tree *) pointer_map_contains (id->decl_map, decl);
1002 if (n)
1003 {
1004 tree new_tree;
1005 tree old;
1006 /* If we happen to get an ADDR_EXPR in n->value, strip
1007 it manually here as we'll eventually get ADDR_EXPRs
1008 which lie about their types pointed to. In this case
1009 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1010 but we absolutely rely on that. As fold_indirect_ref
1011 does other useful transformations, try that first, though. */
1012 tree type = TREE_TYPE (TREE_TYPE (*n));
1013 if (id->do_not_unshare)
1014 new_tree = *n;
1015 else
1016 new_tree = unshare_expr (*n);
1017 old = *tp;
1018 *tp = gimple_fold_indirect_ref (new_tree);
1019 if (! *tp)
1020 {
1021 if (TREE_CODE (new_tree) == ADDR_EXPR)
1022 {
1023 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1024 type, new_tree);
1025 /* ??? We should either assert here or build
1026 a VIEW_CONVERT_EXPR instead of blindly leaking
1027 incompatible types to our IL. */
1028 if (! *tp)
1029 *tp = TREE_OPERAND (new_tree, 0);
1030 }
1031 else
1032 {
1033 *tp = build1 (INDIRECT_REF, type, new_tree);
1034 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1035 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1036 TREE_READONLY (*tp) = TREE_READONLY (old);
1037 TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
1038 }
1039 }
1040 *walk_subtrees = 0;
1041 return NULL;
1042 }
1043 }
1044 else if (TREE_CODE (*tp) == MEM_REF)
1045 {
1046 /* We need to re-canonicalize MEM_REFs from inline substitutions
1047 that can happen when a pointer argument is an ADDR_EXPR. */
1048 tree decl = TREE_OPERAND (*tp, 0);
1049 tree *n;
1050
1051 n = (tree *) pointer_map_contains (id->decl_map, decl);
1052 if (n)
1053 {
1054 tree old = *tp;
1055 *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
1056 unshare_expr (*n), TREE_OPERAND (*tp, 1));
1057 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1058 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1059 *walk_subtrees = 0;
1060 return NULL;
1061 }
1062 }
1063
1064 /* Here is the "usual case". Copy this tree node, and then
1065 tweak some special cases. */
1066 copy_tree_r (tp, walk_subtrees, NULL);
1067
1068 /* If EXPR has a block defined, map it to the newly constructed block.
1069 When inlining we want EXPRs without a block to appear in the block
1070 of the function call if we are not remapping a type. */
1071 if (EXPR_P (*tp))
1072 {
1073 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1074 if (TREE_BLOCK (*tp))
1075 {
1076 tree *n;
1077 n = (tree *) pointer_map_contains (id->decl_map,
1078 TREE_BLOCK (*tp));
1079 gcc_assert (n || id->remapping_type_depth != 0);
1080 if (n)
1081 new_block = *n;
1082 }
1083 TREE_BLOCK (*tp) = new_block;
1084 }
1085
1086 if (TREE_CODE (*tp) != OMP_CLAUSE)
1087 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1088
1089 /* The copied TARGET_EXPR has never been expanded, even if the
1090 original node was expanded already. */
1091 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1092 {
1093 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1094 TREE_OPERAND (*tp, 3) = NULL_TREE;
1095 }
1096
1097 /* Variable substitution need not be simple; see, in particular, the
1098 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1099 and friends are up to date. */
1100 else if (TREE_CODE (*tp) == ADDR_EXPR)
1101 {
1102 int invariant = is_gimple_min_invariant (*tp);
1103 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1104
1105 /* Handle the case where we substituted an INDIRECT_REF
1106 into the operand of the ADDR_EXPR. */
1107 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1108 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1109 else
1110 recompute_tree_invariant_for_addr_expr (*tp);
1111
1112 /* If this used to be invariant, but is not any longer,
1113 then regimplification is probably needed. */
1114 if (invariant && !is_gimple_min_invariant (*tp))
1115 id->regimplify = true;
1116
1117 *walk_subtrees = 0;
1118 }
1119 }
1120
1121 /* Keep iterating. */
1122 return NULL_TREE;
1123 }
1124
1125 /* Helper for remap_gimple_stmt. Given an EH region number for the
1126 source function, map that to the duplicate EH region number in
1127 the destination function. */
1128
1129 static int
1130 remap_eh_region_nr (int old_nr, copy_body_data *id)
1131 {
1132 eh_region old_r, new_r;
1133 void **slot;
1134
1135 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1136 slot = pointer_map_contains (id->eh_map, old_r);
1137 new_r = (eh_region) *slot;
1138
1139 return new_r->index;
1140 }
1141
1142 /* Similar, but operate on INTEGER_CSTs. */
1143
1144 static tree
1145 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1146 {
1147 int old_nr, new_nr;
1148
1149 old_nr = tree_low_cst (old_t_nr, 0);
1150 new_nr = remap_eh_region_nr (old_nr, id);
1151
1152 return build_int_cst (integer_type_node, new_nr);
1153 }
1154
1155 /* Helper for copy_bb. Remap statement STMT using the inlining
1156 information in ID. Return the new statement copy. */
1157
1158 static gimple
1159 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1160 {
1161 gimple copy = NULL;
1162 struct walk_stmt_info wi;
1163 tree new_block;
1164 bool skip_first = false;
1165
1166 /* Begin by recognizing trees that we'll completely rewrite for the
1167 inlining context. Our output for these trees is completely
1168 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1169 into an edge). Further down, we'll handle trees that get
1170 duplicated and/or tweaked. */
1171
1172 /* When requested, GIMPLE_RETURNs should be transformed to just the
1173 contained GIMPLE_ASSIGN. The branch semantics of the return will
1174 be handled elsewhere by manipulating the CFG rather than the
1175 statement. */
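/* For instance, a callee statement "return D.1234;" is rebuilt below as
   an assignment of the (already remapped) return value to id->retvar,
   and the branch to the exit block is represented by a CFG edge rather
   than a statement; the temporary name is illustrative only. */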
1176 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1177 {
1178 tree retval = gimple_return_retval (stmt);
1179
1180 /* If we're returning something, just turn that into an
1181 assignment into the equivalent of the original RESULT_DECL.
1182 If RETVAL is just the result decl, the result decl has
1183 already been set (e.g. a recent "foo (&result_decl, ...)");
1184 just toss the entire GIMPLE_RETURN. */
1185 if (retval
1186 && (TREE_CODE (retval) != RESULT_DECL
1187 && (TREE_CODE (retval) != SSA_NAME
1188 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1189 {
1190 copy = gimple_build_assign (id->retvar, retval);
1191 /* id->retvar is already substituted. Skip it on later remapping. */
1192 skip_first = true;
1193 }
1194 else
1195 return gimple_build_nop ();
1196 }
1197 else if (gimple_has_substatements (stmt))
1198 {
1199 gimple_seq s1, s2;
1200
1201 /* When cloning bodies from the C++ front end, we will be handed bodies
1202 in High GIMPLE form. Handle here all the High GIMPLE statements that
1203 have embedded statements. */
1204 switch (gimple_code (stmt))
1205 {
1206 case GIMPLE_BIND:
1207 copy = copy_gimple_bind (stmt, id);
1208 break;
1209
1210 case GIMPLE_CATCH:
1211 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1212 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1213 break;
1214
1215 case GIMPLE_EH_FILTER:
1216 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1217 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1218 break;
1219
1220 case GIMPLE_TRY:
1221 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1222 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1223 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1224 break;
1225
1226 case GIMPLE_WITH_CLEANUP_EXPR:
1227 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1228 copy = gimple_build_wce (s1);
1229 break;
1230
1231 case GIMPLE_OMP_PARALLEL:
1232 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1233 copy = gimple_build_omp_parallel
1234 (s1,
1235 gimple_omp_parallel_clauses (stmt),
1236 gimple_omp_parallel_child_fn (stmt),
1237 gimple_omp_parallel_data_arg (stmt));
1238 break;
1239
1240 case GIMPLE_OMP_TASK:
1241 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1242 copy = gimple_build_omp_task
1243 (s1,
1244 gimple_omp_task_clauses (stmt),
1245 gimple_omp_task_child_fn (stmt),
1246 gimple_omp_task_data_arg (stmt),
1247 gimple_omp_task_copy_fn (stmt),
1248 gimple_omp_task_arg_size (stmt),
1249 gimple_omp_task_arg_align (stmt));
1250 break;
1251
1252 case GIMPLE_OMP_FOR:
1253 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1254 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1255 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1256 gimple_omp_for_collapse (stmt), s2);
1257 {
1258 size_t i;
1259 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1260 {
1261 gimple_omp_for_set_index (copy, i,
1262 gimple_omp_for_index (stmt, i));
1263 gimple_omp_for_set_initial (copy, i,
1264 gimple_omp_for_initial (stmt, i));
1265 gimple_omp_for_set_final (copy, i,
1266 gimple_omp_for_final (stmt, i));
1267 gimple_omp_for_set_incr (copy, i,
1268 gimple_omp_for_incr (stmt, i));
1269 gimple_omp_for_set_cond (copy, i,
1270 gimple_omp_for_cond (stmt, i));
1271 }
1272 }
1273 break;
1274
1275 case GIMPLE_OMP_MASTER:
1276 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1277 copy = gimple_build_omp_master (s1);
1278 break;
1279
1280 case GIMPLE_OMP_ORDERED:
1281 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1282 copy = gimple_build_omp_ordered (s1);
1283 break;
1284
1285 case GIMPLE_OMP_SECTION:
1286 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1287 copy = gimple_build_omp_section (s1);
1288 break;
1289
1290 case GIMPLE_OMP_SECTIONS:
1291 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1292 copy = gimple_build_omp_sections
1293 (s1, gimple_omp_sections_clauses (stmt));
1294 break;
1295
1296 case GIMPLE_OMP_SINGLE:
1297 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1298 copy = gimple_build_omp_single
1299 (s1, gimple_omp_single_clauses (stmt));
1300 break;
1301
1302 case GIMPLE_OMP_CRITICAL:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy
1305 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1306 break;
1307
1308 case GIMPLE_TRANSACTION:
1309 s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
1310 copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
1311 gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
1312 break;
1313
1314 default:
1315 gcc_unreachable ();
1316 }
1317 }
1318 else
1319 {
1320 if (gimple_assign_copy_p (stmt)
1321 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1322 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1323 {
1324 /* Here we handle statements that are not completely rewritten.
1325 First we detect some inlining-induced bogosities for
1326 discarding. */
1327
1328 /* Some assignments VAR = VAR; don't generate any rtl code
1329 and thus don't count as variable modification. Avoid
1330 keeping bogosities like 0 = 0. */
1331 tree decl = gimple_assign_lhs (stmt), value;
1332 tree *n;
1333
1334 n = (tree *) pointer_map_contains (id->decl_map, decl);
1335 if (n)
1336 {
1337 value = *n;
1338 STRIP_TYPE_NOPS (value);
1339 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1340 return gimple_build_nop ();
1341 }
1342 }
1343
1344 if (gimple_debug_bind_p (stmt))
1345 {
1346 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1347 gimple_debug_bind_get_value (stmt),
1348 stmt);
1349 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1350 return copy;
1351 }
1352 if (gimple_debug_source_bind_p (stmt))
1353 {
1354 copy = gimple_build_debug_source_bind
1355 (gimple_debug_source_bind_get_var (stmt),
1356 gimple_debug_source_bind_get_value (stmt), stmt);
1357 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1358 return copy;
1359 }
1360
1361 /* Create a new deep copy of the statement. */
1362 copy = gimple_copy (stmt);
1363
1364 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1365 RESX and EH_DISPATCH. */
1366 if (id->eh_map)
1367 switch (gimple_code (copy))
1368 {
1369 case GIMPLE_CALL:
1370 {
1371 tree r, fndecl = gimple_call_fndecl (copy);
1372 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1373 switch (DECL_FUNCTION_CODE (fndecl))
1374 {
1375 case BUILT_IN_EH_COPY_VALUES:
1376 r = gimple_call_arg (copy, 1);
1377 r = remap_eh_region_tree_nr (r, id);
1378 gimple_call_set_arg (copy, 1, r);
1379 /* FALLTHRU */
1380
1381 case BUILT_IN_EH_POINTER:
1382 case BUILT_IN_EH_FILTER:
1383 r = gimple_call_arg (copy, 0);
1384 r = remap_eh_region_tree_nr (r, id);
1385 gimple_call_set_arg (copy, 0, r);
1386 break;
1387
1388 default:
1389 break;
1390 }
1391
1392 /* Reset alias info if we didn't apply measures to
1393 keep it valid over inlining by setting DECL_PT_UID. */
1394 if (!id->src_cfun->gimple_df
1395 || !id->src_cfun->gimple_df->ipa_pta)
1396 gimple_call_reset_alias_info (copy);
1397 }
1398 break;
1399
1400 case GIMPLE_RESX:
1401 {
1402 int r = gimple_resx_region (copy);
1403 r = remap_eh_region_nr (r, id);
1404 gimple_resx_set_region (copy, r);
1405 }
1406 break;
1407
1408 case GIMPLE_EH_DISPATCH:
1409 {
1410 int r = gimple_eh_dispatch_region (copy);
1411 r = remap_eh_region_nr (r, id);
1412 gimple_eh_dispatch_set_region (copy, r);
1413 }
1414 break;
1415
1416 default:
1417 break;
1418 }
1419 }
1420
1421 /* If STMT has a block defined, map it to the newly constructed
1422 block. When inlining we want statements without a block to
1423 appear in the block of the function call. */
1424 new_block = id->block;
1425 if (gimple_block (copy))
1426 {
1427 tree *n;
1428 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1429 gcc_assert (n);
1430 new_block = *n;
1431 }
1432
1433 gimple_set_block (copy, new_block);
1434
1435 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1436 return copy;
1437
1438 /* Remap all the operands in COPY. */
1439 memset (&wi, 0, sizeof (wi));
1440 wi.info = id;
1441 if (skip_first)
1442 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1443 else
1444 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1445
1446 /* Clear the copied virtual operands. We are not remapping them here
1447 but are going to recreate them from scratch. */
1448 if (gimple_has_mem_ops (copy))
1449 {
1450 gimple_set_vdef (copy, NULL_TREE);
1451 gimple_set_vuse (copy, NULL_TREE);
1452 }
1453
1454 return copy;
1455 }
1456
1457
1458 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1459 later */
1460
1461 static basic_block
1462 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1463 gcov_type count_scale)
1464 {
1465 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1466 basic_block copy_basic_block;
1467 tree decl;
1468 gcov_type freq;
1469 basic_block prev;
1470
1471 /* Search for previous copied basic block. */
1472 prev = bb->prev_bb;
1473 while (!prev->aux)
1474 prev = prev->prev_bb;
1475
1476 /* create_basic_block() will append every new block to
1477 basic_block_info automatically. */
1478 copy_basic_block = create_basic_block (NULL, (void *) 0,
1479 (basic_block) prev->aux);
1480 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1481
1482 /* We are going to rebuild frequencies from scratch. These values
1483 matter only a little for driving canonicalize_loop_headers. */
1484 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1485
1486 /* We recompute frequencies after inlining, so this is quite safe. */
1487 if (freq > BB_FREQ_MAX)
1488 freq = BB_FREQ_MAX;
1489 copy_basic_block->frequency = freq;
1490
1491 copy_gsi = gsi_start_bb (copy_basic_block);
1492
1493 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1494 {
1495 gimple stmt = gsi_stmt (gsi);
1496 gimple orig_stmt = stmt;
1497
1498 id->regimplify = false;
1499 stmt = remap_gimple_stmt (stmt, id);
1500 if (gimple_nop_p (stmt))
1501 continue;
1502
1503 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1504 seq_gsi = copy_gsi;
1505
1506 /* With return slot optimization we can end up with
1507 non-gimple (foo *)&this->m; fix that here. */
1508 if (is_gimple_assign (stmt)
1509 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1510 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1511 {
1512 tree new_rhs;
1513 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1514 gimple_assign_rhs1 (stmt),
1515 true, NULL, false,
1516 GSI_CONTINUE_LINKING);
1517 gimple_assign_set_rhs1 (stmt, new_rhs);
1518 id->regimplify = false;
1519 }
1520
1521 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1522
1523 if (id->regimplify)
1524 gimple_regimplify_operands (stmt, &seq_gsi);
1525
1526 /* If copy_basic_block was empty at the start of this iteration,
1527 call gsi_start_bb again to get at the newly added statements. */
1528 if (gsi_end_p (copy_gsi))
1529 copy_gsi = gsi_start_bb (copy_basic_block);
1530 else
1531 gsi_next (&copy_gsi);
1532
1533 /* Process the new statement. The call to gimple_regimplify_operands
1534 possibly turned the statement into multiple statements; we
1535 need to process all of them. */
1536 do
1537 {
1538 tree fn;
1539
1540 stmt = gsi_stmt (copy_gsi);
1541 if (is_gimple_call (stmt)
1542 && gimple_call_va_arg_pack_p (stmt)
1543 && id->gimple_call)
1544 {
1545 /* __builtin_va_arg_pack () should be replaced by
1546 all arguments corresponding to ... in the caller. */
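/* For example, if an always_inline wrapper contains
   fprintf (f, fmt, __builtin_va_arg_pack ()) and is inlined from a call
   my_printf (stream, "%d\n", 5), the call is rebuilt below with the
   caller's anonymous argument 5 substituted for the pack
   (my_printf and its arguments are hypothetical). */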
1547 tree p;
1548 gimple new_call;
1549 VEC(tree, heap) *argarray;
1550 size_t nargs = gimple_call_num_args (id->gimple_call);
1551 size_t n;
1552
1553 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1554 nargs--;
1555
1556 /* Create the new array of arguments. */
1557 n = nargs + gimple_call_num_args (stmt);
1558 argarray = VEC_alloc (tree, heap, n);
1559 VEC_safe_grow (tree, heap, argarray, n);
1560
1561 /* Copy all the arguments before '...' */
1562 memcpy (VEC_address (tree, argarray),
1563 gimple_call_arg_ptr (stmt, 0),
1564 gimple_call_num_args (stmt) * sizeof (tree));
1565
1566 /* Append the arguments passed in '...' */
1567 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1568 gimple_call_arg_ptr (id->gimple_call, 0)
1569 + (gimple_call_num_args (id->gimple_call) - nargs),
1570 nargs * sizeof (tree));
1571
1572 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1573 argarray);
1574
1575 VEC_free (tree, heap, argarray);
1576
1577 /* Copy all GIMPLE_CALL flags, location and block, except
1578 GF_CALL_VA_ARG_PACK. */
1579 gimple_call_copy_flags (new_call, stmt);
1580 gimple_call_set_va_arg_pack (new_call, false);
1581 gimple_set_location (new_call, gimple_location (stmt));
1582 gimple_set_block (new_call, gimple_block (stmt));
1583 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1584
1585 gsi_replace (&copy_gsi, new_call, false);
1586 stmt = new_call;
1587 }
1588 else if (is_gimple_call (stmt)
1589 && id->gimple_call
1590 && (decl = gimple_call_fndecl (stmt))
1591 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1592 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1593 {
1594 /* __builtin_va_arg_pack_len () should be replaced by
1595 the number of anonymous arguments. */
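/* In the hypothetical my_printf (stream, "%d\n", 5) example above, with
   two named parameters, this folds to the constant 1. */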
1596 size_t nargs = gimple_call_num_args (id->gimple_call);
1597 tree count, p;
1598 gimple new_stmt;
1599
1600 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1601 nargs--;
1602
1603 count = build_int_cst (integer_type_node, nargs);
1604 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1605 gsi_replace (&copy_gsi, new_stmt, false);
1606 stmt = new_stmt;
1607 }
1608
1609 /* Statements produced by inlining can be unfolded, especially
1610 when we have constant-propagated some operands. We can't fold
1611 them right now for two reasons:
1612 1) folding requires SSA_NAME_DEF_STMTs to be correct, and
1613 2) we can't change function calls to builtins.
1614 So we just mark the statement for later folding. We mark
1615 all new statements, instead of just statements that have changed
1616 by some nontrivial substitution, so that even statements made
1617 foldable indirectly are updated. If this turns out to be
1618 expensive, copy_body can be told to watch for nontrivial
1619 changes. */
1620 if (id->statements_to_fold)
1621 pointer_set_insert (id->statements_to_fold, stmt);
1622
1623 /* We're duplicating a CALL_EXPR. Find any corresponding
1624 callgraph edges and update or duplicate them. */
1625 if (is_gimple_call (stmt))
1626 {
1627 struct cgraph_edge *edge;
1628 int flags;
1629
1630 switch (id->transform_call_graph_edges)
1631 {
1632 case CB_CGE_DUPLICATE:
1633 edge = cgraph_edge (id->src_node, orig_stmt);
1634 if (edge)
1635 {
1636 int edge_freq = edge->frequency;
1637 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1638 gimple_uid (stmt),
1639 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1640 true);
1641 /* We could also just rescale the frequency, but
1642 doing so would introduce roundoff errors and make
1643 the verifier unhappy. */
1644 edge->frequency
1645 = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
1646 copy_basic_block);
1647 if (dump_file
1648 && profile_status_for_function (cfun) != PROFILE_ABSENT
1649 && (edge_freq > edge->frequency + 10
1650 || edge_freq < edge->frequency - 10))
1651 {
1652 fprintf (dump_file, "Edge frequency estimated by "
1653 "cgraph %i diverge from inliner's estimate %i\n",
1654 edge_freq,
1655 edge->frequency);
1656 fprintf (dump_file,
1657 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1658 bb->index,
1659 bb->frequency,
1660 copy_basic_block->frequency);
1661 }
1662 stmt = cgraph_redirect_edge_call_stmt_to_callee (edge);
1663 }
1664 break;
1665
1666 case CB_CGE_MOVE_CLONES:
1667 cgraph_set_call_stmt_including_clones (id->dst_node,
1668 orig_stmt, stmt);
1669 edge = cgraph_edge (id->dst_node, stmt);
1670 break;
1671
1672 case CB_CGE_MOVE:
1673 edge = cgraph_edge (id->dst_node, orig_stmt);
1674 if (edge)
1675 cgraph_set_call_stmt (edge, stmt);
1676 break;
1677
1678 default:
1679 gcc_unreachable ();
1680 }
1681
1682 /* Constant propagation on arguments done during inlining
1683 may create a new direct call. Produce an edge for it. */
1684 if ((!edge
1685 || (edge->indirect_inlining_edge
1686 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1687 && id->dst_node->analyzed
1688 && (fn = gimple_call_fndecl (stmt)) != NULL)
1689 {
1690 struct cgraph_node *dest = cgraph_get_node (fn);
1691
1692 /* We have a missing edge in the callgraph. This can happen
1693 when previous inlining turned an indirect call into a
1694 direct call by constant-propagating arguments, or when we are
1695 producing a dead clone (for further cloning). In all
1696 other cases we have hit a bug (incorrect node sharing is the
1697 most common reason for missing edges). */
1698 gcc_assert (!dest->analyzed
1699 || dest->symbol.address_taken
1700 || !id->src_node->analyzed
1701 || !id->dst_node->analyzed);
1702 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1703 cgraph_create_edge_including_clones
1704 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1705 compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
1706 copy_basic_block),
1707 CIF_ORIGINALLY_INDIRECT_CALL);
1708 else
1709 cgraph_create_edge (id->dst_node, dest, stmt,
1710 bb->count,
1711 compute_call_stmt_bb_frequency
1712 (id->dst_node->symbol.decl,
1713 copy_basic_block))->inline_failed
1714 = CIF_ORIGINALLY_INDIRECT_CALL;
1715 if (dump_file)
1716 {
1717 fprintf (dump_file, "Created new direct edge to %s\n",
1718 cgraph_node_name (dest));
1719 }
1720 }
1721
1722 flags = gimple_call_flags (stmt);
1723 if (flags & ECF_MAY_BE_ALLOCA)
1724 cfun->calls_alloca = true;
1725 if (flags & ECF_RETURNS_TWICE)
1726 cfun->calls_setjmp = true;
1727 }
1728
1729 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1730 id->eh_map, id->eh_lp_nr);
1731
1732 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1733 {
1734 ssa_op_iter i;
1735 tree def;
1736
1737 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1738 if (TREE_CODE (def) == SSA_NAME)
1739 SSA_NAME_DEF_STMT (def) = stmt;
1740 }
1741
1742 gsi_next (&copy_gsi);
1743 }
1744 while (!gsi_end_p (copy_gsi));
1745
1746 copy_gsi = gsi_last_bb (copy_basic_block);
1747 }
1748
1749 return copy_basic_block;
1750 }
1751
1752 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
1753 form is quite easy, since dominator relationship for old basic blocks does
1754 not change.
1755
1756 There is however exception where inlining might change dominator relation
1757 across EH edges from basic block within inlined functions destinating
1758 to landing pads in function we inline into.
1759
1760 The function fills in PHI_RESULTs of such PHI nodes if they refer
1761 to gimple regs. Otherwise, the function mark PHI_RESULT of such
1762 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1763 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1764 set, and this means that there will be no overlapping live ranges
1765 for the underlying symbol.
1766
1767 This might change in the future if we allow redirecting of EH edges;
1768 we might then want to change the way we build the CFG pre-inlining to
1769 include all the possible edges. */
1770 static void
1771 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1772 bool can_throw, bool nonlocal_goto)
1773 {
1774 edge e;
1775 edge_iterator ei;
1776
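/* Look at successor edges that leave the copied region: their destination
   either was not duplicated (no AUX pointer) or maps to the entry block. */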
1777 FOR_EACH_EDGE (e, ei, bb->succs)
1778 if (!e->dest->aux
1779 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1780 {
1781 gimple phi;
1782 gimple_stmt_iterator si;
1783
1784 if (!nonlocal_goto)
1785 gcc_assert (e->flags & EDGE_EH);
1786
1787 if (!can_throw)
1788 gcc_assert (!(e->flags & EDGE_EH));
1789
1790 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1791 {
1792 edge re;
1793
1794 phi = gsi_stmt (si);
1795
1796 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1797 gcc_assert (!e->dest->aux);
1798
1799 gcc_assert ((e->flags & EDGE_EH)
1800 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1801
1802 if (!is_gimple_reg (PHI_RESULT (phi)))
1803 {
1804 mark_virtual_operands_for_renaming (cfun);
1805 continue;
1806 }
1807
1808 re = find_edge (ret_bb, e->dest);
1809 gcc_assert (re);
1810 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1811 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1812
1813 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1814 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1815 }
1816 }
1817 }
1818
1819
1820 /* Copy edges from BB into its copy constructed earlier, scale profile
1821 accordingly. Edges will be taken care of later. Assume the aux
1822 pointers point to the copies of each BB. Return true if any
1823 debug stmts are left after a statement that must end the basic block. */
1824
1825 static bool
1826 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1827 {
1828 basic_block new_bb = (basic_block) bb->aux;
1829 edge_iterator ei;
1830 edge old_edge;
1831 gimple_stmt_iterator si;
1832 int flags;
1833 bool need_debug_cleanup = false;
1834
1835 /* Use the indices from the original blocks to create edges for the
1836 new ones. */
1837 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1838 if (!(old_edge->flags & EDGE_EH))
1839 {
1840 edge new_edge;
1841
1842 flags = old_edge->flags;
1843
1844 /* Return edges do get a FALLTHRU flag when they get inlined. */
1845 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1846 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1847 flags |= EDGE_FALLTHRU;
1848 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1849 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1850 new_edge->probability = old_edge->probability;
1851 }
1852
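/* The entry and exit blocks contain no statements, so there is nothing
   to split or update below. */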
1853 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1854 return false;
1855
1856 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1857 {
1858 gimple copy_stmt;
1859 bool can_throw, nonlocal_goto;
1860
1861 copy_stmt = gsi_stmt (si);
1862 if (!is_gimple_debug (copy_stmt))
1863 update_stmt (copy_stmt);
1864
1865 /* Do this before the possible split_block. */
1866 gsi_next (&si);
1867
1868 /* If this tree could throw an exception, there are two
1869 cases where we need to add abnormal edge(s): the
1870 tree wasn't in a region and there is a "current
1871 region" in the caller; or the original tree had
1872 EH edges. In both cases split the block after the tree,
1873 and add abnormal edge(s) as needed; we need both
1874 those from the callee and the caller.
1875 We check whether the copy can throw, because the const
1876 propagation can change an INDIRECT_REF which throws
1877 into a COMPONENT_REF which doesn't. If the copy
1878 can throw, the original could also throw. */
1879 can_throw = stmt_can_throw_internal (copy_stmt);
1880 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1881
1882 if (can_throw || nonlocal_goto)
1883 {
1884 if (!gsi_end_p (si))
1885 {
1886 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
1887 gsi_next (&si);
1888 if (gsi_end_p (si))
1889 need_debug_cleanup = true;
1890 }
1891 if (!gsi_end_p (si))
1892 /* Note that bb's predecessor edges aren't necessarily
1893 right at this point; split_block doesn't care. */
1894 {
1895 edge e = split_block (new_bb, copy_stmt);
1896
1897 new_bb = e->dest;
1898 new_bb->aux = e->src->aux;
1899 si = gsi_start_bb (new_bb);
1900 }
1901 }
1902
1903 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1904 make_eh_dispatch_edges (copy_stmt);
1905 else if (can_throw)
1906 make_eh_edges (copy_stmt);
1907
1908 if (nonlocal_goto)
1909 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1910
1911 if ((can_throw || nonlocal_goto)
1912 && gimple_in_ssa_p (cfun))
1913 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1914 can_throw, nonlocal_goto);
1915 }
1916 return need_debug_cleanup;
1917 }
1918
1919 /* Copy the PHIs. All blocks and edges have been copied; some blocks
1920 may have been split and new outgoing EH edges inserted.
1921 BB points to the block of the original function and the AUX pointers
1922 link the original and newly copied blocks. */
1923
1924 static void
1925 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1926 {
1927 basic_block const new_bb = (basic_block) bb->aux;
1928 edge_iterator ei;
1929 gimple phi;
1930 gimple_stmt_iterator si;
1931 edge new_edge;
1932 bool inserted = false;
1933
1934 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
1935 {
1936 tree res, new_res;
1937 gimple new_phi;
1938
1939 phi = gsi_stmt (si);
1940 res = PHI_RESULT (phi);
1941 new_res = res;
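/* Only PHI nodes for GIMPLE registers are copied here; PHIs for virtual
   operands are not duplicated. */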
1942 if (is_gimple_reg (res))
1943 {
1944 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1945 SSA_NAME_DEF_STMT (new_res)
1946 = new_phi = create_phi_node (new_res, new_bb);
1947 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1948 {
1949 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
1950 tree arg;
1951 tree new_arg;
1952 tree block = id->block;
1953 edge_iterator ei2;
1954
1955 /* When doing partial cloning, we allow PHIs on the entry block
1956 as long as all the arguments are the same. Find any input
1957 edge to see which argument to copy. */
1958 if (!old_edge)
1959 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
1960 if (!old_edge->src->aux)
1961 break;
1962
1963 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1964 new_arg = arg;
1965 id->block = NULL_TREE;
1966 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1967 id->block = block;
1968 gcc_assert (new_arg);
1969 /* With return slot optimization we can end up with
1970 non-gimple (foo *)&this->m, fix that here. */
1971 if (TREE_CODE (new_arg) != SSA_NAME
1972 && TREE_CODE (new_arg) != FUNCTION_DECL
1973 && !is_gimple_val (new_arg))
1974 {
1975 gimple_seq stmts = NULL;
1976 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1977 gsi_insert_seq_on_edge (new_edge, stmts);
1978 inserted = true;
1979 }
1980 add_phi_arg (new_phi, new_arg, new_edge,
1981 gimple_phi_arg_location_from_edge (phi, old_edge));
1982 }
1983 }
1984 }
1985
1986 /* Commit the delayed edge insertions. */
1987 if (inserted)
1988 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1989 gsi_commit_one_edge_insert (new_edge, NULL);
1990 }
1991
1992
1993 /* Wrapper for remap_decl so it can be used as a callback. */
1994
1995 static tree
1996 remap_decl_1 (tree decl, void *data)
1997 {
1998 return remap_decl (decl, (copy_body_data *) data);
1999 }
2000
2001 /* Build the struct function and associated data structures for the new
2002 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
2003
2004 static void
2005 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2006 {
2007 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2008 gcov_type count_scale;
2009
2010 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2011 count_scale = (REG_BR_PROB_BASE * count
2012 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2013 else
2014 count_scale = REG_BR_PROB_BASE;
2015
2016 /* Register specific tree functions. */
2017 gimple_register_cfg_hooks ();
2018
2019 /* Get clean struct function. */
2020 push_struct_function (new_fndecl);
2021
2022 /* We will rebuild these, so just sanity check that they are empty. */
2023 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2024 gcc_assert (cfun->local_decls == NULL);
2025 gcc_assert (cfun->cfg == NULL);
2026 gcc_assert (cfun->decl == new_fndecl);
2027
2028 /* Copy items we preserve during cloning. */
2029 cfun->static_chain_decl = src_cfun->static_chain_decl;
2030 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2031 cfun->function_end_locus = src_cfun->function_end_locus;
2032 cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
2033 cfun->last_verified = src_cfun->last_verified;
2034 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2035 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2036 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2037 cfun->stdarg = src_cfun->stdarg;
2038 cfun->after_inlining = src_cfun->after_inlining;
2039 cfun->can_throw_non_call_exceptions
2040 = src_cfun->can_throw_non_call_exceptions;
2041 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2042 cfun->returns_struct = src_cfun->returns_struct;
2043 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2044
2045 init_empty_tree_cfg ();
2046
2047 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2048 ENTRY_BLOCK_PTR->count =
2049 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2050 REG_BR_PROB_BASE);
2051 ENTRY_BLOCK_PTR->frequency
2052 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2053 EXIT_BLOCK_PTR->count =
2054 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2055 REG_BR_PROB_BASE);
2056 EXIT_BLOCK_PTR->frequency =
2057 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2058 if (src_cfun->eh)
2059 init_eh_for_function ();
2060
2061 if (src_cfun->gimple_df)
2062 {
2063 init_tree_ssa (cfun);
2064 cfun->gimple_df->in_ssa_p = true;
2065 init_ssa_operands (cfun);
2066 }
2067 pop_cfun ();
2068 }
2069
2070 /* Helper function for copy_cfg_body. Move debug stmts from the end
2071 of NEW_BB to the beginning of successor basic blocks when needed. If the
2072 successor has multiple predecessors, reset the debug stmt values;
2073 otherwise keep them. */
2074
2075 static void
2076 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2077 {
2078 edge e;
2079 edge_iterator ei;
2080 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2081
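/* Nothing to do unless the last nondebug statement can throw or make an
   abnormal goto and is followed by debug stmts. */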
2082 if (gsi_end_p (si)
2083 || gsi_one_before_end_p (si)
2084 || !(stmt_can_throw_internal (gsi_stmt (si))
2085 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2086 return;
2087
2088 FOR_EACH_EDGE (e, ei, new_bb->succs)
2089 {
2090 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2091 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2092 while (is_gimple_debug (gsi_stmt (ssi)))
2093 {
2094 gimple stmt = gsi_stmt (ssi), new_stmt;
2095 tree var;
2096 tree value;
2097
2098 /* For the last edge move the debug stmts instead of copying
2099 them. */
2100 if (ei_one_before_end_p (ei))
2101 {
2102 si = ssi;
2103 gsi_prev (&ssi);
2104 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2105 gimple_debug_bind_reset_value (stmt);
2106 gsi_remove (&si, false);
2107 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2108 continue;
2109 }
2110
2111 if (gimple_debug_bind_p (stmt))
2112 {
2113 var = gimple_debug_bind_get_var (stmt);
2114 if (single_pred_p (e->dest))
2115 {
2116 value = gimple_debug_bind_get_value (stmt);
2117 value = unshare_expr (value);
2118 }
2119 else
2120 value = NULL_TREE;
2121 new_stmt = gimple_build_debug_bind (var, value, stmt);
2122 }
2123 else if (gimple_debug_source_bind_p (stmt))
2124 {
2125 var = gimple_debug_source_bind_get_var (stmt);
2126 value = gimple_debug_source_bind_get_value (stmt);
2127 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2128 }
2129 else
2130 gcc_unreachable ();
2131 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2132 VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
2133 gsi_prev (&ssi);
2134 }
2135 }
2136 }
2137
2138 /* Make a copy of the body of FN so that it can be inserted inline in
2139 another function. Walks FN via CFG, returns new fndecl. */
2140
2141 static tree
2142 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2143 basic_block entry_block_map, basic_block exit_block_map,
2144 bitmap blocks_to_copy, basic_block new_entry)
2145 {
2146 tree callee_fndecl = id->src_fn;
2147 /* Original cfun for the callee, doesn't change. */
2148 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2149 struct function *cfun_to_copy;
2150 basic_block bb;
2151 tree new_fndecl = NULL;
2152 bool need_debug_cleanup = false;
2153 gcov_type count_scale;
2154 int last;
2155 int incoming_frequency = 0;
2156 gcov_type incoming_count = 0;
2157
2158 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2159 count_scale = (REG_BR_PROB_BASE * count
2160 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2161 else
2162 count_scale = REG_BR_PROB_BASE;
2163
2164 /* Register specific tree functions. */
2165 gimple_register_cfg_hooks ();
2166
2167 /* If we are inlining just a region of the function, make sure to connect the
2168 new entry to ENTRY_BLOCK_PTR. Since the new entry can be part of a loop, we
2169 must compute the frequency and probability of ENTRY_BLOCK_PTR based on the
2170 frequencies and probabilities of edges incoming from the nonduplicated region. */
2171 if (new_entry)
2172 {
2173 edge e;
2174 edge_iterator ei;
2175
2176 FOR_EACH_EDGE (e, ei, new_entry->preds)
2177 if (!e->src->aux)
2178 {
2179 incoming_frequency += EDGE_FREQUENCY (e);
2180 incoming_count += e->count;
2181 }
2182 incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
2183 incoming_frequency
2184 = incoming_frequency * frequency_scale / REG_BR_PROB_BASE;
2185 ENTRY_BLOCK_PTR->count = incoming_count;
2186 ENTRY_BLOCK_PTR->frequency = incoming_frequency;
2187 }
2188
2189 /* Must have a CFG at this point. */
2190 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2191 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2192
2193 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2194
2195 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2196 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2197 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2198 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2199
2200 /* Duplicate any exception-handling regions. */
2201 if (cfun->eh)
2202 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2203 remap_decl_1, id);
2204
2205 /* Use aux pointers to map the original blocks to their copies. */
2206 FOR_EACH_BB_FN (bb, cfun_to_copy)
2207 if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
2208 {
2209 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2210 bb->aux = new_bb;
2211 new_bb->aux = bb;
2212 }
2213
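/* Remember the current number of basic blocks; blocks created past this
   point (e.g. by EH edge insertion) still need their AUX fields cleared
   below. */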
2214 last = last_basic_block;
2215
2216 /* Now that we've duplicated the blocks, duplicate their edges. */
2217 FOR_ALL_BB_FN (bb, cfun_to_copy)
2218 if (!blocks_to_copy
2219 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2220 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
2221
2222 if (new_entry)
2223 {
2224 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2225 e->probability = REG_BR_PROB_BASE;
2226 e->count = incoming_count;
2227 }
2228
2229 if (gimple_in_ssa_p (cfun))
2230 FOR_ALL_BB_FN (bb, cfun_to_copy)
2231 if (!blocks_to_copy
2232 || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
2233 copy_phis_for_bb (bb, id);
2234
2235 FOR_ALL_BB_FN (bb, cfun_to_copy)
2236 if (bb->aux)
2237 {
2238 if (need_debug_cleanup
2239 && bb->index != ENTRY_BLOCK
2240 && bb->index != EXIT_BLOCK)
2241 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2242 ((basic_block)bb->aux)->aux = NULL;
2243 bb->aux = NULL;
2244 }
2245
2246 /* Zero out AUX fields of blocks newly created during EH edge
2247 insertion. */
2248 for (; last < last_basic_block; last++)
2249 {
2250 if (need_debug_cleanup)
2251 maybe_move_debug_stmts_to_successors (id, BASIC_BLOCK (last));
2252 BASIC_BLOCK (last)->aux = NULL;
2253 }
2254 entry_block_map->aux = NULL;
2255 exit_block_map->aux = NULL;
2256
2257 if (id->eh_map)
2258 {
2259 pointer_map_destroy (id->eh_map);
2260 id->eh_map = NULL;
2261 }
2262
2263 return new_fndecl;
2264 }
2265
2266 /* Copy the debug STMT using ID. We deal with these statements in a
2267 special way: if any variable in their VALUE expression wasn't
2268 remapped yet, we won't remap it, because that would get decl uids
2269 out of sync, causing codegen differences between -g and -g0. If
2270 this arises, we drop the VALUE expression altogether. */
2271
2272 static void
2273 copy_debug_stmt (gimple stmt, copy_body_data *id)
2274 {
2275 tree t, *n;
2276 struct walk_stmt_info wi;
2277
2278 t = id->block;
2279 if (gimple_block (stmt))
2280 {
2281 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2282 if (n)
2283 t = *n;
2284 }
2285 gimple_set_block (stmt, t);
2286
2287 /* Remap all the operands in COPY. */
2288 memset (&wi, 0, sizeof (wi));
2289 wi.info = id;
2290
2291 processing_debug_stmt = 1;
2292
2293 if (gimple_debug_source_bind_p (stmt))
2294 t = gimple_debug_source_bind_get_var (stmt);
2295 else
2296 t = gimple_debug_bind_get_var (stmt);
2297
2298 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2299 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2300 {
2301 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2302 t = *n;
2303 }
2304 else if (TREE_CODE (t) == VAR_DECL
2305 && !is_global_var (t)
2306 && !pointer_map_contains (id->decl_map, t))
2307 /* T is a non-localized variable. */;
2308 else
2309 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2310
2311 if (gimple_debug_bind_p (stmt))
2312 {
2313 gimple_debug_bind_set_var (stmt, t);
2314
2315 if (gimple_debug_bind_has_value_p (stmt))
2316 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2317 remap_gimple_op_r, &wi, NULL);
2318
2319 /* Punt if any decl couldn't be remapped. */
2320 if (processing_debug_stmt < 0)
2321 gimple_debug_bind_reset_value (stmt);
2322 }
2323 else if (gimple_debug_source_bind_p (stmt))
2324 {
2325 gimple_debug_source_bind_set_var (stmt, t);
2326 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2327 remap_gimple_op_r, &wi, NULL);
2328 }
2329
2330 processing_debug_stmt = 0;
2331
2332 update_stmt (stmt);
2333 }
2334
2335 /* Process deferred debug stmts. In order to give values better odds
2336 of being successfully remapped, we delay the processing of debug
2337 stmts until all other stmts that might require remapping are
2338 processed. */
2339
2340 static void
2341 copy_debug_stmts (copy_body_data *id)
2342 {
2343 size_t i;
2344 gimple stmt;
2345
2346 if (!id->debug_stmts)
2347 return;
2348
2349 FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
2350 copy_debug_stmt (stmt, id);
2351
2352 VEC_free (gimple, heap, id->debug_stmts);
2353 }
2354
2355 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2356 another function. */
2357
2358 static tree
2359 copy_tree_body (copy_body_data *id)
2360 {
2361 tree fndecl = id->src_fn;
2362 tree body = DECL_SAVED_TREE (fndecl);
2363
2364 walk_tree (&body, copy_tree_body_r, id, NULL);
2365
2366 return body;
2367 }
2368
2369 /* Make a copy of the body of FN so that it can be inserted inline in
2370 another function. */
2371
2372 static tree
2373 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2374 basic_block entry_block_map, basic_block exit_block_map,
2375 bitmap blocks_to_copy, basic_block new_entry)
2376 {
2377 tree fndecl = id->src_fn;
2378 tree body;
2379
2380 /* If this body has a CFG, walk CFG and copy. */
2381 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2382 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2383 blocks_to_copy, new_entry);
2384 copy_debug_stmts (id);
2385
2386 return body;
2387 }
2388
2389 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2390 defined in function FN, or of a data member thereof. */
2391
2392 static bool
2393 self_inlining_addr_expr (tree value, tree fn)
2394 {
2395 tree var;
2396
2397 if (TREE_CODE (value) != ADDR_EXPR)
2398 return false;
2399
2400 var = get_base_address (TREE_OPERAND (value, 0));
2401
2402 return var && auto_var_in_fn_p (var, fn);
2403 }
2404
2405 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2406 lexical block and line number information from base_stmt, if given,
2407 or from the last stmt of the block otherwise. */
2408
2409 static gimple
2410 insert_init_debug_bind (copy_body_data *id,
2411 basic_block bb, tree var, tree value,
2412 gimple base_stmt)
2413 {
2414 gimple note;
2415 gimple_stmt_iterator gsi;
2416 tree tracked_var;
2417
2418 if (!gimple_in_ssa_p (id->src_cfun))
2419 return NULL;
2420
2421 if (!MAY_HAVE_DEBUG_STMTS)
2422 return NULL;
2423
2424 tracked_var = target_for_debug_bind (var);
2425 if (!tracked_var)
2426 return NULL;
2427
2428 if (bb)
2429 {
2430 gsi = gsi_last_bb (bb);
2431 if (!base_stmt && !gsi_end_p (gsi))
2432 base_stmt = gsi_stmt (gsi);
2433 }
2434
2435 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2436
2437 if (bb)
2438 {
2439 if (!gsi_end_p (gsi))
2440 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2441 else
2442 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2443 }
2444
2445 return note;
2446 }
2447
2448 static void
2449 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2450 {
2451 /* If VAR represents a zero-sized variable, it's possible that the
2452 assignment statement may result in no gimple statements. */
2453 if (init_stmt)
2454 {
2455 gimple_stmt_iterator si = gsi_last_bb (bb);
2456
2457 /* We can end up with init statements that store to a non-register
2458 from a rhs with a conversion. Handle that here by forcing the
2459 rhs into a temporary. gimple_regimplify_operands is not
2460 prepared to do this for us. */
2461 if (!is_gimple_debug (init_stmt)
2462 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2463 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2464 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2465 {
2466 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2467 gimple_expr_type (init_stmt),
2468 gimple_assign_rhs1 (init_stmt));
2469 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2470 GSI_NEW_STMT);
2471 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2472 gimple_assign_set_rhs1 (init_stmt, rhs);
2473 }
2474 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2475 gimple_regimplify_operands (init_stmt, &si);
2476
2477 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2478 {
2479 tree var, def = gimple_assign_lhs (init_stmt);
2480
2481 if (TREE_CODE (def) == SSA_NAME)
2482 var = SSA_NAME_VAR (def);
2483 else
2484 var = def;
2485
2486 insert_init_debug_bind (id, bb, var, def, init_stmt);
2487 }
2488 }
2489 }
2490
2491 /* Initialize parameter P with VALUE. If needed, produce an init statement
2492 at the end of BB. When BB is NULL, we return the init statement to be
2493 output later. */
2494 static gimple
2495 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2496 basic_block bb, tree *vars)
2497 {
2498 gimple init_stmt = NULL;
2499 tree var;
2500 tree rhs = value;
2501 tree def = (gimple_in_ssa_p (cfun)
2502 ? ssa_default_def (id->src_cfun, p) : NULL);
2503
2504 if (value
2505 && value != error_mark_node
2506 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2507 {
2508 /* If we can match up types by promotion/demotion do so. */
2509 if (fold_convertible_p (TREE_TYPE (p), value))
2510 rhs = fold_convert (TREE_TYPE (p), value);
2511 else
2512 {
2513 /* ??? For valid programs we should not end up here.
2514 Still if we end up with truly mismatched types here, fall back
2515 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
2516 GIMPLE to the following passes. */
2517 if (!is_gimple_reg_type (TREE_TYPE (value))
2518 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
2519 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2520 else
2521 rhs = build_zero_cst (TREE_TYPE (p));
2522 }
2523 }
2524
2525 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2526 here since the type of this decl must be visible to the calling
2527 function. */
2528 var = copy_decl_to_var (p, id);
2529
2530 /* Declare this new variable. */
2531 DECL_CHAIN (var) = *vars;
2532 *vars = var;
2533
2534 /* Make gimplifier happy about this variable. */
2535 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2536
2537 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2538 we would not need to create a new variable here at all, if it
2539 weren't for debug info. Still, we can just use the argument
2540 value. */
2541 if (TREE_READONLY (p)
2542 && !TREE_ADDRESSABLE (p)
2543 && value && !TREE_SIDE_EFFECTS (value)
2544 && !def)
2545 {
2546 /* We may produce non-gimple trees by adding NOPs or introduce
2547 invalid sharing when the operand is not really constant.
2548 It is not a big deal to prohibit constant propagation here, as
2549 we will constant propagate in the DOM1 pass anyway. */
2550 if (is_gimple_min_invariant (value)
2551 && useless_type_conversion_p (TREE_TYPE (p),
2552 TREE_TYPE (value))
2553 /* We have to be very careful about ADDR_EXPR. Make sure
2554 the base variable isn't a local variable of the inlined
2555 function, e.g., when doing recursive inlining, direct or
2556 mutually-recursive or whatever, which is why we don't
2557 just test whether fn == current_function_decl. */
2558 && ! self_inlining_addr_expr (value, fn))
2559 {
2560 insert_decl_map (id, p, value);
2561 insert_debug_decl_map (id, p, var);
2562 return insert_init_debug_bind (id, bb, var, value, NULL);
2563 }
2564 }
2565
2566 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2567 that way, when the PARM_DECL is encountered, it will be
2568 automatically replaced by the VAR_DECL. */
2569 insert_decl_map (id, p, var);
2570
2571 /* Even if P was TREE_READONLY, the new VAR should not be.
2572 In the original code, we would have constructed a
2573 temporary, and then the function body would have never
2574 changed the value of P. However, now, we will be
2575 constructing VAR directly. The constructor body may
2576 change its value multiple times as it is being
2577 constructed. Therefore, it must not be TREE_READONLY;
2578 the back-end assumes that TREE_READONLY variable is
2579 assigned to only once. */
2580 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2581 TREE_READONLY (var) = 0;
2582
2583 /* If there is no setup required and we are in SSA, take the easy route
2584 replacing all SSA names representing the function parameter by the
2585 SSA name passed to the function.
2586
2587 We need to construct a map for the variable anyway, as it might be used
2588 in different SSA names when the parameter is set in the function.
2589
2590 Do the replacement at -O0 for const arguments replaced by a constant.
2591 This is important for builtin_constant_p and other constructs requiring a
2592 constant argument to be visible in the inlined function body. */
2593 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2594 && (optimize
2595 || (TREE_READONLY (p)
2596 && is_gimple_min_invariant (rhs)))
2597 && (TREE_CODE (rhs) == SSA_NAME
2598 || is_gimple_min_invariant (rhs))
2599 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2600 {
2601 insert_decl_map (id, def, rhs);
2602 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2603 }
2604
2605 /* If the value of argument is never used, don't care about initializing
2606 it. */
2607 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2608 {
2609 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2610 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2611 }
2612
2613 /* Initialize this VAR_DECL from the equivalent argument. Convert
2614 the argument to the proper type in case it was promoted. */
2615 if (value)
2616 {
2617 if (rhs == error_mark_node)
2618 {
2619 insert_decl_map (id, p, var);
2620 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2621 }
2622
2623 STRIP_USELESS_TYPE_CONVERSION (rhs);
2624
2625 /* If we are in SSA form properly remap the default definition
2626 or assign to a dummy SSA name if the parameter is unused and
2627 we are not optimizing. */
2628 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2629 {
2630 if (def)
2631 {
2632 def = remap_ssa_name (def, id);
2633 init_stmt = gimple_build_assign (def, rhs);
2634 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2635 set_ssa_default_def (cfun, var, NULL);
2636 }
2637 else if (!optimize)
2638 {
2639 def = make_ssa_name (var, NULL);
2640 init_stmt = gimple_build_assign (def, rhs);
2641 }
2642 }
2643 else
2644 init_stmt = gimple_build_assign (var, rhs);
2645
2646 if (bb && init_stmt)
2647 insert_init_stmt (id, bb, init_stmt);
2648 }
2649 return init_stmt;
2650 }
2651
2652 /* Generate code to initialize the parameters of the function at the
2653 top of the stack in ID from the GIMPLE_CALL STMT. */
2654
2655 static void
2656 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2657 tree fn, basic_block bb)
2658 {
2659 tree parms;
2660 size_t i;
2661 tree p;
2662 tree vars = NULL_TREE;
2663 tree static_chain = gimple_call_chain (stmt);
2664
2665 /* Figure out what the parameters are. */
2666 parms = DECL_ARGUMENTS (fn);
2667
2668 /* Loop through the parameter declarations, replacing each with an
2669 equivalent VAR_DECL, appropriately initialized. */
2670 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2671 {
2672 tree val;
2673 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2674 setup_one_parameter (id, p, val, fn, bb, &vars);
2675 }
2676 /* After remapping parameters remap their types. This has to be done
2677 in a second loop over all parameters to appropriately remap
2678 variable sized arrays when the size is specified in a
2679 parameter following the array. */
2680 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
2681 {
2682 tree *varp = (tree *) pointer_map_contains (id->decl_map, p);
2683 if (varp
2684 && TREE_CODE (*varp) == VAR_DECL)
2685 {
2686 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
2687 ? ssa_default_def (id->src_cfun, p) : NULL);
2688 tree var = *varp;
2689 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
2690 /* Also remap the default definition if it was remapped
2691 to the default definition of the parameter replacement
2692 by the parameter setup. */
2693 if (def)
2694 {
2695 tree *defp = (tree *) pointer_map_contains (id->decl_map, def);
2696 if (defp
2697 && TREE_CODE (*defp) == SSA_NAME
2698 && SSA_NAME_VAR (*defp) == var)
2699 TREE_TYPE (*defp) = TREE_TYPE (var);
2700 }
2701 }
2702 }
2703
2704 /* Initialize the static chain. */
2705 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2706 gcc_assert (fn != current_function_decl);
2707 if (p)
2708 {
2709 /* No static chain? Seems like a bug in tree-nested.c. */
2710 gcc_assert (static_chain);
2711
2712 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2713 }
2714
2715 declare_inline_vars (id->block, vars);
2716 }
2717
2718
2719 /* Declare a return variable to replace the RESULT_DECL for the
2720 function we are calling. An appropriate DECL_STMT is returned.
2721 The USE_STMT is filled to contain a use of the declaration to
2722 indicate the return value of the function.
2723
2724 RETURN_SLOT, if non-null, is the place in which to store the result. It
2725 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2726 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2727
2728 The return value is a (possibly null) value that holds the result
2729 as seen by the caller. */
2730
2731 static tree
2732 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2733 basic_block entry_bb)
2734 {
2735 tree callee = id->src_fn;
2736 tree result = DECL_RESULT (callee);
2737 tree callee_type = TREE_TYPE (result);
2738 tree caller_type;
2739 tree var, use;
2740
2741 /* Handle type-mismatches in the function declaration return type
2742 vs. the call expression. */
2743 if (modify_dest)
2744 caller_type = TREE_TYPE (modify_dest);
2745 else
2746 caller_type = TREE_TYPE (TREE_TYPE (callee));
2747
2748 /* We don't need to do anything for functions that don't return anything. */
2749 if (VOID_TYPE_P (callee_type))
2750 return NULL_TREE;
2751
2752 /* If there was a return slot, then the return value is the
2753 dereferenced address of that object. */
2754 if (return_slot)
2755 {
2756 /* The front end shouldn't have used both return_slot and
2757 a modify expression. */
2758 gcc_assert (!modify_dest);
2759 if (DECL_BY_REFERENCE (result))
2760 {
2761 tree return_slot_addr = build_fold_addr_expr (return_slot);
2762 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2763
2764 /* We are going to construct *&return_slot and we can't do that
2765 for variables believed not to be addressable.
2766
2767 FIXME: This check can possibly match, because values returned
2768 via return slot optimization are not believed to have their
2769 address taken by alias analysis. */
2770 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2771 var = return_slot_addr;
2772 }
2773 else
2774 {
2775 var = return_slot;
2776 gcc_assert (TREE_CODE (var) != SSA_NAME);
2777 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2778 }
2779 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2780 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2781 && !DECL_GIMPLE_REG_P (result)
2782 && DECL_P (var))
2783 DECL_GIMPLE_REG_P (var) = 0;
2784 use = NULL;
2785 goto done;
2786 }
2787
2788 /* All types requiring non-trivial constructors should have been handled. */
2789 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2790
2791 /* Attempt to avoid creating a new temporary variable. */
2792 if (modify_dest
2793 && TREE_CODE (modify_dest) != SSA_NAME)
2794 {
2795 bool use_it = false;
2796
2797 /* We can't use MODIFY_DEST if there's type promotion involved. */
2798 if (!useless_type_conversion_p (callee_type, caller_type))
2799 use_it = false;
2800
2801 /* ??? If we're assigning to a variable sized type, then we must
2802 reuse the destination variable, because we've no good way to
2803 create variable sized temporaries at this point. */
2804 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2805 use_it = true;
2806
2807 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2808 reuse it as the result of the call directly. Don't do this if
2809 it would promote MODIFY_DEST to addressable. */
2810 else if (TREE_ADDRESSABLE (result))
2811 use_it = false;
2812 else
2813 {
2814 tree base_m = get_base_address (modify_dest);
2815
2816 /* If the base isn't a decl, then it's a pointer, and we don't
2817 know where that's going to go. */
2818 if (!DECL_P (base_m))
2819 use_it = false;
2820 else if (is_global_var (base_m))
2821 use_it = false;
2822 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2823 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2824 && !DECL_GIMPLE_REG_P (result)
2825 && DECL_GIMPLE_REG_P (base_m))
2826 use_it = false;
2827 else if (!TREE_ADDRESSABLE (base_m))
2828 use_it = true;
2829 }
2830
2831 if (use_it)
2832 {
2833 var = modify_dest;
2834 use = NULL;
2835 goto done;
2836 }
2837 }
2838
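/* Otherwise, create a new temporary variable to hold the return value;
   its size must be a compile-time constant. */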
2839 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2840
2841 var = copy_result_decl_to_var (result, id);
2842 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2843
2844 /* Do not have the rest of GCC warn about this variable as it should
2845 not be visible to the user. */
2846 TREE_NO_WARNING (var) = 1;
2847
2848 declare_inline_vars (id->block, var);
2849
2850 /* Build the use expr. If the return type of the function was
2851 promoted, convert it back to the expected type. */
2852 use = var;
2853 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2854 {
2855 /* If we can match up types by promotion/demotion do so. */
2856 if (fold_convertible_p (caller_type, var))
2857 use = fold_convert (caller_type, var);
2858 else
2859 {
2860 /* ??? For valid programs we should not end up here.
2861 Still if we end up with truly mismatched types here, fall back
2862 to using a MEM_REF to not leak invalid GIMPLE to the following
2863 passes. */
2864 /* Prevent var from being written into SSA form. */
2865 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
2866 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
2867 DECL_GIMPLE_REG_P (var) = false;
2868 else if (is_gimple_reg_type (TREE_TYPE (var)))
2869 TREE_ADDRESSABLE (var) = true;
2870 use = fold_build2 (MEM_REF, caller_type,
2871 build_fold_addr_expr (var),
2872 build_int_cst (ptr_type_node, 0));
2873 }
2874 }
2875
2876 STRIP_USELESS_TYPE_CONVERSION (use);
2877
2878 if (DECL_BY_REFERENCE (result))
2879 {
2880 TREE_ADDRESSABLE (var) = 1;
2881 var = build_fold_addr_expr (var);
2882 }
2883
2884 done:
2885 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2886 way, when the RESULT_DECL is encountered, it will be
2887 automatically replaced by the VAR_DECL.
2888
2889 When returning by reference, ensure that RESULT_DECL remaps to
2890 gimple_val. */
2891 if (DECL_BY_REFERENCE (result)
2892 && !is_gimple_val (var))
2893 {
2894 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
2895 insert_decl_map (id, result, temp);
2896 /* When RESULT_DECL is in SSA form, we need to remap and initialize
2897 its default_def SSA_NAME. */
2898 if (gimple_in_ssa_p (id->src_cfun)
2899 && is_gimple_reg (result))
2900 {
2901 temp = make_ssa_name (temp, NULL);
2902 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
2903 }
2904 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
2905 }
2906 else
2907 insert_decl_map (id, result, var);
2908
2909 /* Remember this so we can ignore it in remap_decls. */
2910 id->retvar = var;
2911
2912 return use;
2913 }
2914
2915 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2916 to a local label. */
2917
2918 static tree
2919 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2920 {
2921 tree node = *nodep;
2922 tree fn = (tree) fnp;
2923
2924 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2925 return node;
2926
2927 if (TYPE_P (node))
2928 *walk_subtrees = 0;
2929
2930 return NULL_TREE;
2931 }
2932
2933 /* Determine if the function can be copied. If so return NULL. If
2934 not return a string describing the reason for failure. */
2935
2936 static const char *
2937 copy_forbidden (struct function *fun, tree fndecl)
2938 {
2939 const char *reason = fun->cannot_be_copied_reason;
2940 tree decl;
2941 unsigned ix;
2942
2943 /* Only examine the function once. */
2944 if (fun->cannot_be_copied_set)
2945 return reason;
2946
2947 /* We cannot copy a function that receives a non-local goto
2948 because we cannot remap the destination label used in the
2949 function that is performing the non-local goto. */
2950 /* ??? Actually, this should be possible, if we work at it.
2951 No doubt there's just a handful of places that simply
2952 assume it doesn't happen and don't substitute properly. */
2953 if (fun->has_nonlocal_label)
2954 {
2955 reason = G_("function %q+F can never be copied "
2956 "because it receives a non-local goto");
2957 goto fail;
2958 }
2959
2960 FOR_EACH_LOCAL_DECL (fun, ix, decl)
2961 if (TREE_CODE (decl) == VAR_DECL
2962 && TREE_STATIC (decl)
2963 && !DECL_EXTERNAL (decl)
2964 && DECL_INITIAL (decl)
2965 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2966 has_label_address_in_static_1,
2967 fndecl))
2968 {
2969 reason = G_("function %q+F can never be copied because it saves "
2970 "address of local label in a static variable");
2971 goto fail;
2972 }
2973
2974 fail:
2975 fun->cannot_be_copied_reason = reason;
2976 fun->cannot_be_copied_set = true;
2977 return reason;
2978 }
2979
2980
2981 static const char *inline_forbidden_reason;
2982
2983 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2984 iff a function cannot be inlined. Also sets the reason why. */
2985
2986 static tree
2987 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2988 struct walk_stmt_info *wip)
2989 {
2990 tree fn = (tree) wip->info;
2991 tree t;
2992 gimple stmt = gsi_stmt (*gsi);
2993
2994 switch (gimple_code (stmt))
2995 {
2996 case GIMPLE_CALL:
2997 /* Refuse to inline an alloca call unless the user explicitly forced it,
2998 as this may drastically change the program's memory overhead when the
2999 function using alloca is called in a loop. In the GCC present in
3000 SPEC2000, inlining into schedule_block caused it to require 2GB of
3001 RAM instead of 256MB. Don't do so for alloca calls emitted for
3002 VLA objects, as those can't cause unbounded growth (they're always
3003 wrapped inside stack_save/stack_restore regions). */
3004 if (gimple_alloca_call_p (stmt)
3005 && !gimple_call_alloca_for_var_p (stmt)
3006 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3007 {
3008 inline_forbidden_reason
3009 = G_("function %q+F can never be inlined because it uses "
3010 "alloca (override using the always_inline attribute)");
3011 *handled_ops_p = true;
3012 return fn;
3013 }
3014
3015 t = gimple_call_fndecl (stmt);
3016 if (t == NULL_TREE)
3017 break;
3018
3019 /* We cannot inline functions that call setjmp. */
3020 if (setjmp_call_p (t))
3021 {
3022 inline_forbidden_reason
3023 = G_("function %q+F can never be inlined because it uses setjmp");
3024 *handled_ops_p = true;
3025 return t;
3026 }
3027
3028 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3029 switch (DECL_FUNCTION_CODE (t))
3030 {
3031 /* We cannot inline functions that take a variable number of
3032 arguments. */
3033 case BUILT_IN_VA_START:
3034 case BUILT_IN_NEXT_ARG:
3035 case BUILT_IN_VA_END:
3036 inline_forbidden_reason
3037 = G_("function %q+F can never be inlined because it "
3038 "uses variable argument lists");
3039 *handled_ops_p = true;
3040 return t;
3041
3042 case BUILT_IN_LONGJMP:
3043 /* We can't inline functions that call __builtin_longjmp at
3044 all. The non-local goto machinery really requires the
3045 destination be in a different function. If we allow the
3046 function calling __builtin_longjmp to be inlined into the
3047 function calling __builtin_setjmp, Things will Go Awry. */
3048 inline_forbidden_reason
3049 = G_("function %q+F can never be inlined because "
3050 "it uses setjmp-longjmp exception handling");
3051 *handled_ops_p = true;
3052 return t;
3053
3054 case BUILT_IN_NONLOCAL_GOTO:
3055 /* Similarly. */
3056 inline_forbidden_reason
3057 = G_("function %q+F can never be inlined because "
3058 "it uses non-local goto");
3059 *handled_ops_p = true;
3060 return t;
3061
3062 case BUILT_IN_RETURN:
3063 case BUILT_IN_APPLY_ARGS:
3064 /* If a __builtin_apply_args caller would be inlined,
3065 it would be saving arguments of the function it has
3066 been inlined into. Similarly, __builtin_return would
3067 return from the function into which it has been inlined. */
3068 inline_forbidden_reason
3069 = G_("function %q+F can never be inlined because "
3070 "it uses __builtin_return or __builtin_apply_args");
3071 *handled_ops_p = true;
3072 return t;
3073
3074 default:
3075 break;
3076 }
3077 break;
3078
3079 case GIMPLE_GOTO:
3080 t = gimple_goto_dest (stmt);
3081
3082 /* We will not inline a function which uses computed goto. The
3083 addresses of its local labels, which may be tucked into
3084 global storage, are of course not constant across
3085 instantiations, which causes unexpected behavior. */
3086 if (TREE_CODE (t) != LABEL_DECL)
3087 {
3088 inline_forbidden_reason
3089 = G_("function %q+F can never be inlined "
3090 "because it contains a computed goto");
3091 *handled_ops_p = true;
3092 return t;
3093 }
3094 break;
3095
3096 default:
3097 break;
3098 }
3099
3100 *handled_ops_p = false;
3101 return NULL_TREE;
3102 }
3103
3104 /* Return true if FNDECL is a function that cannot be inlined into
3105 another one. */
3106
3107 static bool
3108 inline_forbidden_p (tree fndecl)
3109 {
3110 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3111 struct walk_stmt_info wi;
3112 struct pointer_set_t *visited_nodes;
3113 basic_block bb;
3114 bool forbidden_p = false;
3115
3116 /* First check for shared reasons not to copy the code. */
3117 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3118 if (inline_forbidden_reason != NULL)
3119 return true;
3120
3121 /* Next, walk the statements of the function looking for
3122 constructs we can't handle or that are non-optimal for inlining. */
3123 visited_nodes = pointer_set_create ();
3124 memset (&wi, 0, sizeof (wi));
3125 wi.info = (void *) fndecl;
3126 wi.pset = visited_nodes;
3127
3128 FOR_EACH_BB_FN (bb, fun)
3129 {
3130 gimple ret;
3131 gimple_seq seq = bb_seq (bb);
3132 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3133 forbidden_p = (ret != NULL);
3134 if (forbidden_p)
3135 break;
3136 }
3137
3138 pointer_set_destroy (visited_nodes);
3139 return forbidden_p;
3140 }
3141 \f
3142 /* Return false if the function FNDECL cannot be inlined on account of its
3143 attributes, true otherwise. */
3144 static bool
3145 function_attribute_inlinable_p (const_tree fndecl)
3146 {
3147 if (targetm.attribute_table)
3148 {
3149 const_tree a;
3150
3151 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3152 {
3153 const_tree name = TREE_PURPOSE (a);
3154 int i;
3155
3156 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3157 if (is_attribute_p (targetm.attribute_table[i].name, name))
3158 return targetm.function_attribute_inlinable_p (fndecl);
3159 }
3160 }
3161
3162 return true;
3163 }
3164
3165 /* Returns nonzero if FN is a function that does not have any
3166 fundamental inline blocking properties. */
3167
3168 bool
3169 tree_inlinable_function_p (tree fn)
3170 {
3171 bool inlinable = true;
3172 bool do_warning;
3173 tree always_inline;
3174
3175 /* If we've already decided this function shouldn't be inlined,
3176 there's no need to check again. */
3177 if (DECL_UNINLINABLE (fn))
3178 return false;
3179
3180 /* We only warn for functions declared `inline' by the user. */
3181 do_warning = (warn_inline
3182 && DECL_DECLARED_INLINE_P (fn)
3183 && !DECL_NO_INLINE_WARNING_P (fn)
3184 && !DECL_IN_SYSTEM_HEADER (fn));
3185
3186 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3187
3188 if (flag_no_inline
3189 && always_inline == NULL)
3190 {
3191 if (do_warning)
3192 warning (OPT_Winline, "function %q+F can never be inlined because it "
3193 "is suppressed using -fno-inline", fn);
3194 inlinable = false;
3195 }
3196
3197 else if (!function_attribute_inlinable_p (fn))
3198 {
3199 if (do_warning)
3200 warning (OPT_Winline, "function %q+F can never be inlined because it "
3201 "uses attributes conflicting with inlining", fn);
3202 inlinable = false;
3203 }
3204
3205 else if (inline_forbidden_p (fn))
3206 {
3207 /* See if we should warn about uninlinable functions. Previously,
3208 some of these warnings would be issued while trying to expand
3209 the function inline, but that would cause multiple warnings
3210 about functions that would for example call alloca. But since
3211 this is a property of the function, just one warning is enough.
3212 As a bonus we can now give more details about the reason why a
3213 function is not inlinable. */
3214 if (always_inline)
3215 error (inline_forbidden_reason, fn);
3216 else if (do_warning)
3217 warning (OPT_Winline, inline_forbidden_reason, fn);
3218
3219 inlinable = false;
3220 }
3221
3222 /* Squirrel away the result so that we don't have to check again. */
3223 DECL_UNINLINABLE (fn) = !inlinable;
3224
3225 return inlinable;
3226 }
3227
3228 /* Estimate the cost of a memory move. Use the machine-dependent
3229 word size and take a possible memcpy call into account. */
3230
3231 int
3232 estimate_move_cost (tree type)
3233 {
3234 HOST_WIDE_INT size;
3235
3236 gcc_assert (!VOID_TYPE_P (type));
3237
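/* For vector types, estimate the number of preferred-SIMD-width moves
   needed to copy the whole vector. */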
3238 if (TREE_CODE (type) == VECTOR_TYPE)
3239 {
3240 enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3241 enum machine_mode simd
3242 = targetm.vectorize.preferred_simd_mode (inner);
3243 int simd_mode_size = GET_MODE_SIZE (simd);
3244 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3245 / simd_mode_size);
3246 }
3247
3248 size = int_size_in_bytes (type);
3249
3250 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3251 /* Cost of a memcpy call, 3 arguments and the call. */
3252 return 4;
3253 else
3254 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3255 }
3256
3257 /* Returns the cost of operation CODE, according to WEIGHTS. */
3258
3259 static int
3260 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3261 tree op1 ATTRIBUTE_UNUSED, tree op2)
3262 {
3263 switch (code)
3264 {
3265 /* These are "free" conversions, or their presumed cost
3266 is folded into other operations. */
3267 case RANGE_EXPR:
3268 CASE_CONVERT:
3269 case COMPLEX_EXPR:
3270 case PAREN_EXPR:
3271 case VIEW_CONVERT_EXPR:
3272 return 0;
3273
3274 /* Assign cost of 1 to usual operations.
3275 ??? We may consider mapping RTL costs to this. */
3276 case COND_EXPR:
3277 case VEC_COND_EXPR:
3278 case VEC_PERM_EXPR:
3279
3280 case PLUS_EXPR:
3281 case POINTER_PLUS_EXPR:
3282 case MINUS_EXPR:
3283 case MULT_EXPR:
3284 case MULT_HIGHPART_EXPR:
3285 case FMA_EXPR:
3286
3287 case ADDR_SPACE_CONVERT_EXPR:
3288 case FIXED_CONVERT_EXPR:
3289 case FIX_TRUNC_EXPR:
3290
3291 case NEGATE_EXPR:
3292 case FLOAT_EXPR:
3293 case MIN_EXPR:
3294 case MAX_EXPR:
3295 case ABS_EXPR:
3296
3297 case LSHIFT_EXPR:
3298 case RSHIFT_EXPR:
3299 case LROTATE_EXPR:
3300 case RROTATE_EXPR:
3301 case VEC_LSHIFT_EXPR:
3302 case VEC_RSHIFT_EXPR:
3303
3304 case BIT_IOR_EXPR:
3305 case BIT_XOR_EXPR:
3306 case BIT_AND_EXPR:
3307 case BIT_NOT_EXPR:
3308
3309 case TRUTH_ANDIF_EXPR:
3310 case TRUTH_ORIF_EXPR:
3311 case TRUTH_AND_EXPR:
3312 case TRUTH_OR_EXPR:
3313 case TRUTH_XOR_EXPR:
3314 case TRUTH_NOT_EXPR:
3315
3316 case LT_EXPR:
3317 case LE_EXPR:
3318 case GT_EXPR:
3319 case GE_EXPR:
3320 case EQ_EXPR:
3321 case NE_EXPR:
3322 case ORDERED_EXPR:
3323 case UNORDERED_EXPR:
3324
3325 case UNLT_EXPR:
3326 case UNLE_EXPR:
3327 case UNGT_EXPR:
3328 case UNGE_EXPR:
3329 case UNEQ_EXPR:
3330 case LTGT_EXPR:
3331
3332 case CONJ_EXPR:
3333
3334 case PREDECREMENT_EXPR:
3335 case PREINCREMENT_EXPR:
3336 case POSTDECREMENT_EXPR:
3337 case POSTINCREMENT_EXPR:
3338
3339 case REALIGN_LOAD_EXPR:
3340
3341 case REDUC_MAX_EXPR:
3342 case REDUC_MIN_EXPR:
3343 case REDUC_PLUS_EXPR:
3344 case WIDEN_SUM_EXPR:
3345 case WIDEN_MULT_EXPR:
3346 case DOT_PROD_EXPR:
3347 case WIDEN_MULT_PLUS_EXPR:
3348 case WIDEN_MULT_MINUS_EXPR:
3349 case WIDEN_LSHIFT_EXPR:
3350
3351 case VEC_WIDEN_MULT_HI_EXPR:
3352 case VEC_WIDEN_MULT_LO_EXPR:
3353 case VEC_WIDEN_MULT_EVEN_EXPR:
3354 case VEC_WIDEN_MULT_ODD_EXPR:
3355 case VEC_UNPACK_HI_EXPR:
3356 case VEC_UNPACK_LO_EXPR:
3357 case VEC_UNPACK_FLOAT_HI_EXPR:
3358 case VEC_UNPACK_FLOAT_LO_EXPR:
3359 case VEC_PACK_TRUNC_EXPR:
3360 case VEC_PACK_SAT_EXPR:
3361 case VEC_PACK_FIX_TRUNC_EXPR:
3362 case VEC_WIDEN_LSHIFT_HI_EXPR:
3363 case VEC_WIDEN_LSHIFT_LO_EXPR:
3364
3365 return 1;
3366
3367 /* A few special cases of expensive operations. This is useful
3368 to avoid inlining functions having too many of these. */
3369 case TRUNC_DIV_EXPR:
3370 case CEIL_DIV_EXPR:
3371 case FLOOR_DIV_EXPR:
3372 case ROUND_DIV_EXPR:
3373 case EXACT_DIV_EXPR:
3374 case TRUNC_MOD_EXPR:
3375 case CEIL_MOD_EXPR:
3376 case FLOOR_MOD_EXPR:
3377 case ROUND_MOD_EXPR:
3378 case RDIV_EXPR:
3379 if (TREE_CODE (op2) != INTEGER_CST)
3380 return weights->div_mod_cost;
3381 return 1;
3382
3383 default:
3384 /* We expect a copy assignment with no operator. */
3385 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3386 return 0;
3387 }
3388 }
3389
3390
3391 /* Estimate number of instructions that will be created by expanding
3392 the statements in the statement sequence STMTS.
3393 WEIGHTS contains weights attributed to various constructs. */
3394
3395 static
3396 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3397 {
3398 int cost;
3399 gimple_stmt_iterator gsi;
3400
3401 cost = 0;
3402 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3403 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3404
3405 return cost;
3406 }
3407
3408
3409 /* Estimate number of instructions that will be created by expanding STMT.
3410 WEIGHTS contains weights attributed to various constructs. */
3411
3412 int
3413 estimate_num_insns (gimple stmt, eni_weights *weights)
3414 {
3415 unsigned cost, i;
3416 enum gimple_code code = gimple_code (stmt);
3417 tree lhs;
3418 tree rhs;
3419
3420 switch (code)
3421 {
3422 case GIMPLE_ASSIGN:
3423 /* Try to estimate the cost of assignments. We have two cases to
3424 deal with:
3425 1) Simple assignments to registers;
3426 2) Stores to things that must live in memory. This includes
3427 "normal" stores to scalars, but also assignments of large
3428 structures, or constructors of big arrays;
3429
3430 Let us look at these two cases, assuming we have "a = b + C":
3431 <GIMPLE_ASSIGN <var_decl "a">
3432 <plus_expr <var_decl "b"> <constant C>>
3433 If "a" is a GIMPLE register, the assignment to it is free on almost
3434 any target, because "a" usually ends up in a real register. Hence
3435 the only cost of this expression comes from the PLUS_EXPR, and we
3436 can ignore the GIMPLE_ASSIGN.
3437 If "a" is not a GIMPLE register, the assignment to "a" will most
3438 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3439 of moving something into "a", which we compute using the function
3440 estimate_move_cost. */
3441 if (gimple_clobber_p (stmt))
3442 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3443
3444 lhs = gimple_assign_lhs (stmt);
3445 rhs = gimple_assign_rhs1 (stmt);
3446
3447 if (is_gimple_reg (lhs))
3448 cost = 0;
3449 else
3450 cost = estimate_move_cost (TREE_TYPE (lhs));
3451
3452 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3453 cost += estimate_move_cost (TREE_TYPE (rhs));
3454
3455 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3456 gimple_assign_rhs1 (stmt),
3457 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3458 == GIMPLE_BINARY_RHS
3459 ? gimple_assign_rhs2 (stmt) : NULL);
3460 break;
3461
3462 case GIMPLE_COND:
3463 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3464 gimple_op (stmt, 0),
3465 gimple_op (stmt, 1));
3466 break;
3467
3468 case GIMPLE_SWITCH:
3469 /* Take into account cost of the switch + guess 2 conditional jumps for
3470 each case label.
3471
3472 TODO: once the switch expansion logic is sufficiently separated, we can
3473 do a better job of estimating the cost of the switch. */
3474 if (weights->time_based)
3475 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3476 else
3477 cost = gimple_switch_num_labels (stmt) * 2;
3478 break;
3479
3480 case GIMPLE_CALL:
3481 {
3482 tree decl = gimple_call_fndecl (stmt);
3483 struct cgraph_node *node = NULL;
3484
3485 /* Do not special case builtins where we see the body.
3486 This just confuses the inliner. */
3487 if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
3488 ;
3489 /* For builtins that are likely expanded to nothing or
3490 inlined, do not account operand costs. */
3491 else if (is_simple_builtin (decl))
3492 return 0;
3493 else if (is_inexpensive_builtin (decl))
3494 return weights->target_builtin_call_cost;
3495 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3496 {
3497 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
3498 specialize the cheap expansion we do here.
3499 ??? This asks for a more general solution. */
3500 switch (DECL_FUNCTION_CODE (decl))
3501 {
3502 case BUILT_IN_POW:
3503 case BUILT_IN_POWF:
3504 case BUILT_IN_POWL:
3505 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
3506 && REAL_VALUES_EQUAL
3507 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
3508 return estimate_operator_cost (MULT_EXPR, weights,
3509 gimple_call_arg (stmt, 0),
3510 gimple_call_arg (stmt, 0));
3511 break;
3512
3513 default:
3514 break;
3515 }
3516 }
3517
3518 cost = node ? weights->call_cost : weights->indirect_call_cost;
3519 if (gimple_call_lhs (stmt))
3520 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
3521 for (i = 0; i < gimple_call_num_args (stmt); i++)
3522 {
3523 tree arg = gimple_call_arg (stmt, i);
3524 cost += estimate_move_cost (TREE_TYPE (arg));
3525 }
3526 break;
3527 }
3528
3529 case GIMPLE_RETURN:
3530 return weights->return_cost;
3531
3532 case GIMPLE_GOTO:
3533 case GIMPLE_LABEL:
3534 case GIMPLE_NOP:
3535 case GIMPLE_PHI:
3536 case GIMPLE_PREDICT:
3537 case GIMPLE_DEBUG:
3538 return 0;
3539
3540 case GIMPLE_ASM:
3541 return asm_str_count (gimple_asm_string (stmt));
3542
3543 case GIMPLE_RESX:
3544 /* This is either going to be an external function call with one
3545 argument, or two register copy statements plus a goto. */
3546 return 2;
3547
3548 case GIMPLE_EH_DISPATCH:
3549 /* ??? This is going to turn into a switch statement. Ideally
3550 we'd have a look at the eh region and estimate the number of
3551 edges involved. */
3552 return 10;
3553
3554 case GIMPLE_BIND:
3555 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3556
3557 case GIMPLE_EH_FILTER:
3558 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3559
3560 case GIMPLE_CATCH:
3561 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3562
3563 case GIMPLE_TRY:
3564 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3565 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3566
3567 /* OpenMP directives are generally very expensive. */
3568
3569 case GIMPLE_OMP_RETURN:
3570 case GIMPLE_OMP_SECTIONS_SWITCH:
3571 case GIMPLE_OMP_ATOMIC_STORE:
3572 case GIMPLE_OMP_CONTINUE:
3573 /* ...except these, which are cheap. */
3574 return 0;
3575
3576 case GIMPLE_OMP_ATOMIC_LOAD:
3577 return weights->omp_cost;
3578
3579 case GIMPLE_OMP_FOR:
3580 return (weights->omp_cost
3581 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3582 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3583
3584 case GIMPLE_OMP_PARALLEL:
3585 case GIMPLE_OMP_TASK:
3586 case GIMPLE_OMP_CRITICAL:
3587 case GIMPLE_OMP_MASTER:
3588 case GIMPLE_OMP_ORDERED:
3589 case GIMPLE_OMP_SECTION:
3590 case GIMPLE_OMP_SECTIONS:
3591 case GIMPLE_OMP_SINGLE:
3592 return (weights->omp_cost
3593 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3594
3595 case GIMPLE_TRANSACTION:
3596 return (weights->tm_cost
3597 + estimate_num_insns_seq (gimple_transaction_body (stmt),
3598 weights));
3599
3600 default:
3601 gcc_unreachable ();
3602 }
3603
3604 return cost;
3605 }
3606
3607 /* Estimate number of instructions that will be created by expanding
3608 function FNDECL. WEIGHTS contains weights attributed to various
3609 constructs. */
3610
3611 int
3612 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3613 {
3614 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3615 gimple_stmt_iterator bsi;
3616 basic_block bb;
3617 int n = 0;
3618
3619 gcc_assert (my_function && my_function->cfg);
3620 FOR_EACH_BB_FN (bb, my_function)
3621 {
3622 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3623 n += estimate_num_insns (gsi_stmt (bsi), weights);
3624 }
3625
3626 return n;
3627 }
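/* A minimal usage sketch (the weight structures are the globals that
   init_inline_once below fills in):

     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);  */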
3628
3629
3630 /* Initializes weights used by estimate_num_insns. */
3631
3632 void
3633 init_inline_once (void)
3634 {
3635 eni_size_weights.call_cost = 1;
3636 eni_size_weights.indirect_call_cost = 3;
3637 eni_size_weights.target_builtin_call_cost = 1;
3638 eni_size_weights.div_mod_cost = 1;
3639 eni_size_weights.omp_cost = 40;
3640 eni_size_weights.tm_cost = 10;
3641 eni_size_weights.time_based = false;
3642 eni_size_weights.return_cost = 1;
3643
3644 /* Estimating time for call is difficult, since we have no idea what the
3645 called function does. In the current uses of eni_time_weights,
3646 underestimating the cost does less harm than overestimating it, so
3647 we choose a rather small value here. */
3648 eni_time_weights.call_cost = 10;
3649 eni_time_weights.indirect_call_cost = 15;
3650 eni_time_weights.target_builtin_call_cost = 1;
3651 eni_time_weights.div_mod_cost = 10;
3652 eni_time_weights.omp_cost = 40;
3653 eni_time_weights.tm_cost = 40;
3654 eni_time_weights.time_based = true;
3655 eni_time_weights.return_cost = 2;
3656 }
3657
3658 /* Estimate the number of instructions in a gimple_seq. */
3659
3660 int
3661 count_insns_seq (gimple_seq seq, eni_weights *weights)
3662 {
3663 gimple_stmt_iterator gsi;
3664 int n = 0;
3665 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3666 n += estimate_num_insns (gsi_stmt (gsi), weights);
3667
3668 return n;
3669 }
3670
3671
3672 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3673
3674 static void
3675 prepend_lexical_block (tree current_block, tree new_block)
3676 {
3677 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3678 BLOCK_SUBBLOCKS (current_block) = new_block;
3679 BLOCK_SUPERCONTEXT (new_block) = current_block;
3680 }
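/* For instance, if CURRENT_BLOCK already has sub-blocks B1 -> B2, then after
   prepend_lexical_block (CURRENT_BLOCK, N) the sub-block chain reads
   N -> B1 -> B2 and BLOCK_SUPERCONTEXT (N) == CURRENT_BLOCK.  (Illustrative
   names only.)  */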
3681
3682 /* Add local variables from CALLEE to CALLER. */
3683
3684 static inline void
3685 add_local_variables (struct function *callee, struct function *caller,
3686 copy_body_data *id)
3687 {
3688 tree var;
3689 unsigned ix;
3690
3691 FOR_EACH_LOCAL_DECL (callee, ix, var)
3692 if (!can_be_nonlocal (var, id))
3693 {
3694 tree new_var = remap_decl (var, id);
3695
3696 /* Remap debug-expressions. */
3697 if (TREE_CODE (new_var) == VAR_DECL
3698 && DECL_DEBUG_EXPR_IS_FROM (new_var)
3699 && new_var != var)
3700 {
3701 tree tem = DECL_DEBUG_EXPR (var);
3702 bool old_regimplify = id->regimplify;
3703 id->remapping_type_depth++;
3704 walk_tree (&tem, copy_tree_body_r, id, NULL);
3705 id->remapping_type_depth--;
3706 id->regimplify = old_regimplify;
3707 SET_DECL_DEBUG_EXPR (new_var, tem);
3708 }
3709 add_local_decl (caller, new_var);
3710 }
3711 }
3712
3713 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3714
3715 static bool
3716 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3717 {
3718 tree use_retvar;
3719 tree fn;
3720 struct pointer_map_t *st, *dst;
3721 tree return_slot;
3722 tree modify_dest;
3723 location_t saved_location;
3724 struct cgraph_edge *cg_edge;
3725 cgraph_inline_failed_t reason;
3726 basic_block return_block;
3727 edge e;
3728 gimple_stmt_iterator gsi, stmt_gsi;
3729 bool successfully_inlined = FALSE;
3730 bool purge_dead_abnormal_edges;
3731
3732 /* Set input_location here so we get the right instantiation context
3733 if we call instantiate_decl from inlinable_function_p. */
3734 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
3735 saved_location = input_location;
3736 input_location = gimple_location (stmt);
3737
3738 /* From here on, we're only interested in CALL_EXPRs. */
3739 if (gimple_code (stmt) != GIMPLE_CALL)
3740 goto egress;
3741
3742 cg_edge = cgraph_edge (id->dst_node, stmt);
3743 gcc_checking_assert (cg_edge);
3744 /* First, see if we can figure out what function is being called.
3745 If we cannot, then there is no hope of inlining the function. */
3746 if (cg_edge->indirect_unknown_callee)
3747 goto egress;
3748 fn = cg_edge->callee->symbol.decl;
3749 gcc_checking_assert (fn);
3750
3751 /* If FN is a declaration of a function in a nested scope that was
3752 globally declared inline, we don't set its DECL_INITIAL.
3753 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3754 	     C++ front-end uses it for cdtors to refer to their internal
3755 	     declarations, which are not real functions.  Fortunately those
3756 don't have trees to be saved, so we can tell by checking their
3757 gimple_body. */
3758 if (!DECL_INITIAL (fn)
3759 && DECL_ABSTRACT_ORIGIN (fn)
3760 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3761 fn = DECL_ABSTRACT_ORIGIN (fn);
3762
3763 /* Don't try to inline functions that are not well-suited to inlining. */
3764 if (cg_edge->inline_failed)
3765 {
3766 reason = cg_edge->inline_failed;
3767 /* If this call was originally indirect, we do not want to emit any
3768 	         inlining-related warnings or sorry messages because there are no
3769 guarantees regarding those. */
3770 if (cg_edge->indirect_inlining_edge)
3771 goto egress;
3772
3773 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3774 /* Avoid warnings during early inline pass. */
3775 && cgraph_global_info_ready
3776 /* PR 20090218-1_0.c. Body can be provided by another module. */
3777 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
3778 {
3779 error ("inlining failed in call to always_inline %q+F: %s", fn,
3780 cgraph_inline_failed_string (reason));
3781 error ("called from here");
3782 }
3783 else if (warn_inline
3784 && DECL_DECLARED_INLINE_P (fn)
3785 && !DECL_NO_INLINE_WARNING_P (fn)
3786 && !DECL_IN_SYSTEM_HEADER (fn)
3787 && reason != CIF_UNSPECIFIED
3788 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3789 		   /* Do not warn about non-inlined recursive calls.  */
3790 && !cgraph_edge_recursive_p (cg_edge)
3791 /* Avoid warnings during early inline pass. */
3792 && cgraph_global_info_ready)
3793 {
3794 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3795 fn, _(cgraph_inline_failed_string (reason)));
3796 warning (OPT_Winline, "called from here");
3797 }
3798 goto egress;
3799 }
3800 fn = cg_edge->callee->symbol.decl;
3801
3802 #ifdef ENABLE_CHECKING
3803 if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
3804 verify_cgraph_node (cg_edge->callee);
3805 #endif
3806
3807 /* We will be inlining this callee. */
3808 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3809
3810 	  /* Update the caller's EH personality.  */
3811 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
3812 DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
3813 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
3814
3815 /* Split the block holding the GIMPLE_CALL. */
3816 e = split_block (bb, stmt);
3817 bb = e->src;
3818 return_block = e->dest;
3819 remove_edge (e);
3820
3821 /* split_block splits after the statement; work around this by
3822 moving the call into the second block manually. Not pretty,
3823 but seems easier than doing the CFG manipulation by hand
3824 when the GIMPLE_CALL is in the last statement of BB. */
3825 stmt_gsi = gsi_last_bb (bb);
3826 gsi_remove (&stmt_gsi, false);
3827
3828 	  /* If the GIMPLE_CALL was the last statement of BB, it may have
3829 been the source of abnormal edges. In this case, schedule
3830 the removal of dead abnormal edges. */
3831 gsi = gsi_start_bb (return_block);
3832 if (gsi_end_p (gsi))
3833 {
3834 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3835 purge_dead_abnormal_edges = true;
3836 }
3837 else
3838 {
3839 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3840 purge_dead_abnormal_edges = false;
3841 }
3842
3843 stmt_gsi = gsi_start_bb (return_block);
3844
3845 /* Build a block containing code to initialize the arguments, the
3846 actual inline expansion of the body, and a label for the return
3847 statements within the function to jump to. The type of the
3848 statement expression is the return type of the function call. */
3849 id->block = make_node (BLOCK);
3850 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3851 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3852 prepend_lexical_block (gimple_block (stmt), id->block);
3853
3854 /* Local declarations will be replaced by their equivalents in this
3855 map. */
3856 st = id->decl_map;
3857 id->decl_map = pointer_map_create ();
3858 dst = id->debug_map;
3859 id->debug_map = NULL;
3860
3861 /* Record the function we are about to inline. */
3862 id->src_fn = fn;
3863 id->src_node = cg_edge->callee;
3864 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3865 id->gimple_call = stmt;
3866
3867 gcc_assert (!id->src_cfun->after_inlining);
3868
3869 id->entry_bb = bb;
3870 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3871 {
3872 gimple_stmt_iterator si = gsi_last_bb (bb);
3873 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3874 NOT_TAKEN),
3875 GSI_NEW_STMT);
3876 }
3877 initialize_inlined_parameters (id, stmt, fn, bb);
3878
3879 if (DECL_INITIAL (fn))
3880 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3881
3882 /* Return statements in the function body will be replaced by jumps
3883 to the RET_LABEL. */
3884 gcc_assert (DECL_INITIAL (fn));
3885 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3886
3887 /* Find the LHS to which the result of this call is assigned. */
3888 return_slot = NULL;
3889 if (gimple_call_lhs (stmt))
3890 {
3891 modify_dest = gimple_call_lhs (stmt);
3892
3893 /* The function which we are inlining might not return a value,
3894 in which case we should issue a warning that the function
3895 does not return a value. In that case the optimizers will
3896 see that the variable to which the value is assigned was not
3897 initialized. We do not want to issue a warning about that
3898 uninitialized variable. */
3899 if (DECL_P (modify_dest))
3900 TREE_NO_WARNING (modify_dest) = 1;
3901
3902 if (gimple_call_return_slot_opt_p (stmt))
3903 {
3904 return_slot = modify_dest;
3905 modify_dest = NULL;
3906 }
3907 }
3908 else
3909 modify_dest = NULL;
3910
3911 /* If we are inlining a call to the C++ operator new, we don't want
3912 to use type based alias analysis on the return value. Otherwise
3913 we may get confused if the compiler sees that the inlined new
3914 function returns a pointer which was just deleted. See bug
3915 33407. */
3916 if (DECL_IS_OPERATOR_NEW (fn))
3917 {
3918 return_slot = NULL;
3919 modify_dest = NULL;
3920 }
3921
3922 /* Declare the return variable for the function. */
3923 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
3924
3925 /* Add local vars in this inlined callee to caller. */
3926 add_local_variables (id->src_cfun, cfun, id);
3927
3928 if (dump_file && (dump_flags & TDF_DETAILS))
3929 {
3930 fprintf (dump_file, "Inlining ");
3931 print_generic_expr (dump_file, id->src_fn, 0);
3932 fprintf (dump_file, " to ");
3933 print_generic_expr (dump_file, id->dst_fn, 0);
3934 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3935 }
3936
3937 /* This is it. Duplicate the callee body. Assume callee is
3938 pre-gimplified. Note that we must not alter the caller
3939 function in any way before this point, as this CALL_EXPR may be
3940 a self-referential call; if we're calling ourselves, we need to
3941 duplicate our body before altering anything. */
3942 copy_body (id, bb->count,
3943 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3944 bb, return_block, NULL, NULL);
3945
3946 /* Reset the escaped solution. */
3947 if (cfun->gimple_df)
3948 pt_solution_reset (&cfun->gimple_df->escaped);
3949
3950 /* Clean up. */
3951 if (id->debug_map)
3952 {
3953 pointer_map_destroy (id->debug_map);
3954 id->debug_map = dst;
3955 }
3956 pointer_map_destroy (id->decl_map);
3957 id->decl_map = st;
3958
3959 	  /* Unlink the call's virtual operands before replacing it.  */
3960 unlink_stmt_vdef (stmt);
3961
3962 /* If the inlined function returns a result that we care about,
3963 substitute the GIMPLE_CALL with an assignment of the return
3964 variable to the LHS of the call. That is, if STMT was
3965 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3966 if (use_retvar && gimple_call_lhs (stmt))
3967 {
3968 gimple old_stmt = stmt;
3969 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3970 gsi_replace (&stmt_gsi, stmt, false);
3971 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3972 }
3973 else
3974 {
3975 /* Handle the case of inlining a function with no return
3976 statement, which causes the return value to become undefined. */
3977 if (gimple_call_lhs (stmt)
3978 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3979 {
3980 tree name = gimple_call_lhs (stmt);
3981 tree var = SSA_NAME_VAR (name);
3982 tree def = ssa_default_def (cfun, var);
3983
3984 if (def)
3985 {
3986 /* If the variable is used undefined, make this name
3987 undefined via a move. */
3988 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3989 gsi_replace (&stmt_gsi, stmt, true);
3990 }
3991 else
3992 {
3993 /* Otherwise make this variable undefined. */
3994 gsi_remove (&stmt_gsi, true);
3995 set_ssa_default_def (cfun, var, name);
3996 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3997 }
3998 }
3999 else
4000 gsi_remove (&stmt_gsi, true);
4001 }
4002
4003 if (purge_dead_abnormal_edges)
4004 {
4005 gimple_purge_dead_eh_edges (return_block);
4006 gimple_purge_dead_abnormal_call_edges (return_block);
4007 }
4008
4009 /* If the value of the new expression is ignored, that's OK. We
4010 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4011 the equivalent inlined version either. */
4012 if (is_gimple_assign (stmt))
4013 {
4014 gcc_assert (gimple_assign_single_p (stmt)
4015 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4016 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4017 }
4018
4019 /* Output the inlining info for this abstract function, since it has been
4020 inlined. If we don't do this now, we can lose the information about the
4021 variables in the function when the blocks get blown away as soon as we
4022 remove the cgraph node. */
4023 (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
4024
4025 /* Update callgraph if needed. */
4026 cgraph_remove_node (cg_edge->callee);
4027
4028 id->block = NULL_TREE;
4029 successfully_inlined = TRUE;
4030
4031 egress:
4032 input_location = saved_location;
4033 return successfully_inlined;
4034 }
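/* A rough sketch of the transformation performed by expand_call_inline for
   a statement "a = foo (x)", where foo is "int foo (int p) { return p + 1; }"
   (the temporary names are illustrative only):

     before:   a = foo (x);

     after:    p.1 = x;              <- initialize_inlined_parameters
               retval.2 = p.1 + 1;   <- copied body; the return becomes an
                                        assignment to the return variable
               a = retval.2;         <- the call is replaced by an assignment
                                        from USE_RETVAR  */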
4035
4036 /* Expand call statements reachable from STMT_P.
4037 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4038 in a MODIFY_EXPR. */
4039
4040 static bool
4041 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4042 {
4043 gimple_stmt_iterator gsi;
4044
4045 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4046 {
4047 gimple stmt = gsi_stmt (gsi);
4048
4049 if (is_gimple_call (stmt)
4050 && expand_call_inline (bb, stmt, id))
4051 return true;
4052 }
4053
4054 return false;
4055 }
4056
4057
4058 /* Walk all basic blocks created after FIRST and try to fold every statement
4059 in the STATEMENTS pointer set. */
4060
4061 static void
4062 fold_marked_statements (int first, struct pointer_set_t *statements)
4063 {
4064 for (; first < n_basic_blocks; first++)
4065 if (BASIC_BLOCK (first))
4066 {
4067 gimple_stmt_iterator gsi;
4068
4069 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
4070 !gsi_end_p (gsi);
4071 gsi_next (&gsi))
4072 if (pointer_set_contains (statements, gsi_stmt (gsi)))
4073 {
4074 gimple old_stmt = gsi_stmt (gsi);
4075 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4076
4077 if (old_decl && DECL_BUILT_IN (old_decl))
4078 {
4079 		  /* Folding builtins can create multiple instructions;
4080 		     we need to look at all of them.  */
4081 gimple_stmt_iterator i2 = gsi;
4082 gsi_prev (&i2);
4083 if (fold_stmt (&gsi))
4084 {
4085 gimple new_stmt;
4086 /* If a builtin at the end of a bb folded into nothing,
4087 the following loop won't work. */
4088 if (gsi_end_p (gsi))
4089 {
4090 cgraph_update_edges_for_call_stmt (old_stmt,
4091 old_decl, NULL);
4092 break;
4093 }
4094 if (gsi_end_p (i2))
4095 i2 = gsi_start_bb (BASIC_BLOCK (first));
4096 else
4097 gsi_next (&i2);
4098 while (1)
4099 {
4100 new_stmt = gsi_stmt (i2);
4101 update_stmt (new_stmt);
4102 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4103 new_stmt);
4104
4105 if (new_stmt == gsi_stmt (gsi))
4106 {
4107 			      /* It is okay to check only the very last of
4108 				 these statements.  If it is a throwing
4109 				 statement nothing will change.  If it isn't,
4110 				 this can remove EH edges.  The only problem
4111 				 case would be if some intermediate statements
4112 				 threw but the last one did not; that would
4113 				 mean we'd have to split the block, which we
4114 				 can't do here and we'd lose anyway.  And as
4115 				 builtins probably never throw, this all is
4116 				 moot anyway.  */
4117 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4118 new_stmt))
4119 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4120 break;
4121 }
4122 gsi_next (&i2);
4123 }
4124 }
4125 }
4126 else if (fold_stmt (&gsi))
4127 {
4128 /* Re-read the statement from GSI as fold_stmt() may
4129 have changed it. */
4130 gimple new_stmt = gsi_stmt (gsi);
4131 update_stmt (new_stmt);
4132
4133 if (is_gimple_call (old_stmt)
4134 || is_gimple_call (new_stmt))
4135 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4136 new_stmt);
4137
4138 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4139 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
4140 }
4141 }
4142 }
4143 }
4144
4145 /* Return true if BB has at least one abnormal outgoing edge. */
4146
4147 static inline bool
4148 has_abnormal_outgoing_edge_p (basic_block bb)
4149 {
4150 edge e;
4151 edge_iterator ei;
4152
4153 FOR_EACH_EDGE (e, ei, bb->succs)
4154 if (e->flags & EDGE_ABNORMAL)
4155 return true;
4156
4157 return false;
4158 }
4159
4160 /* Expand calls to inline functions in the body of FN. */
4161
4162 unsigned int
4163 optimize_inline_calls (tree fn)
4164 {
4165 copy_body_data id;
4166 basic_block bb;
4167 int last = n_basic_blocks;
4168 struct gimplify_ctx gctx;
4169 bool inlined_p = false;
4170
4171 /* Clear out ID. */
4172 memset (&id, 0, sizeof (id));
4173
4174 id.src_node = id.dst_node = cgraph_get_node (fn);
4175 gcc_assert (id.dst_node->analyzed);
4176 id.dst_fn = fn;
4177 /* Or any functions that aren't finished yet. */
4178 if (current_function_decl)
4179 id.dst_fn = current_function_decl;
4180
4181 id.copy_decl = copy_decl_maybe_to_var;
4182 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4183 id.transform_new_cfg = false;
4184 id.transform_return_to_modify = true;
4185 id.transform_lang_insert_block = NULL;
4186 id.statements_to_fold = pointer_set_create ();
4187
4188 push_gimplify_context (&gctx);
4189
4190 /* We make no attempts to keep dominance info up-to-date. */
4191 free_dominance_info (CDI_DOMINATORS);
4192 free_dominance_info (CDI_POST_DOMINATORS);
4193
4194 /* Register specific gimple functions. */
4195 gimple_register_cfg_hooks ();
4196
4197 /* Reach the trees by walking over the CFG, and note the
4198 enclosing basic-blocks in the call edges. */
4199 /* We walk the blocks going forward, because inlined function bodies
4200 will split id->current_basic_block, and the new blocks will
4201 follow it; we'll trudge through them, processing their CALL_EXPRs
4202 along the way. */
4203 FOR_EACH_BB (bb)
4204 inlined_p |= gimple_expand_calls_inline (bb, &id);
4205
4206 pop_gimplify_context (NULL);
4207
4208 #ifdef ENABLE_CHECKING
4209 {
4210 struct cgraph_edge *e;
4211
4212 verify_cgraph_node (id.dst_node);
4213
4214 /* Double check that we inlined everything we are supposed to inline. */
4215 for (e = id.dst_node->callees; e; e = e->next_callee)
4216 gcc_assert (e->inline_failed);
4217 }
4218 #endif
4219
4220 /* Fold queued statements. */
4221 fold_marked_statements (last, id.statements_to_fold);
4222 pointer_set_destroy (id.statements_to_fold);
4223
4224 gcc_assert (!id.debug_stmts);
4225
4226 /* If we didn't inline into the function there is nothing to do. */
4227 if (!inlined_p)
4228 return 0;
4229
4230 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4231 number_blocks (fn);
4232
4233 delete_unreachable_blocks_update_callgraph (&id);
4234 #ifdef ENABLE_CHECKING
4235 verify_cgraph_node (id.dst_node);
4236 #endif
4237
4238 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4239 	     not possible yet - the IPA passes might make various functions not
4240 	     throw, and they don't care to proactively update local EH info.  This is
4241 	     done later in the fixup_cfg pass, which also executes the verification.  */
4242 return (TODO_update_ssa
4243 | TODO_cleanup_cfg
4244 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4245 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4246 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4247 }
4248
4249 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4250
4251 tree
4252 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4253 {
4254 enum tree_code code = TREE_CODE (*tp);
4255 enum tree_code_class cl = TREE_CODE_CLASS (code);
4256
4257 /* We make copies of most nodes. */
4258 if (IS_EXPR_CODE_CLASS (cl)
4259 || code == TREE_LIST
4260 || code == TREE_VEC
4261 || code == TYPE_DECL
4262 || code == OMP_CLAUSE)
4263 {
4264 /* Because the chain gets clobbered when we make a copy, we save it
4265 here. */
4266 tree chain = NULL_TREE, new_tree;
4267
4268 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
4269 chain = TREE_CHAIN (*tp);
4270
4271 /* Copy the node. */
4272 new_tree = copy_node (*tp);
4273
4274 /* Propagate mudflap marked-ness. */
4275 if (flag_mudflap && mf_marked_p (*tp))
4276 mf_mark (new_tree);
4277
4278 *tp = new_tree;
4279
4280 /* Now, restore the chain, if appropriate. That will cause
4281 walk_tree to walk into the chain as well. */
4282 if (code == PARM_DECL
4283 || code == TREE_LIST
4284 || code == OMP_CLAUSE)
4285 TREE_CHAIN (*tp) = chain;
4286
4287 /* For now, we don't update BLOCKs when we make copies. So, we
4288 have to nullify all BIND_EXPRs. */
4289 if (TREE_CODE (*tp) == BIND_EXPR)
4290 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4291 }
4292 else if (code == CONSTRUCTOR)
4293 {
4294 /* CONSTRUCTOR nodes need special handling because
4295 we need to duplicate the vector of elements. */
4296 tree new_tree;
4297
4298 new_tree = copy_node (*tp);
4299
4300 /* Propagate mudflap marked-ness. */
4301 if (flag_mudflap && mf_marked_p (*tp))
4302 mf_mark (new_tree);
4303
4304 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4305 CONSTRUCTOR_ELTS (*tp));
4306 *tp = new_tree;
4307 }
4308 else if (code == STATEMENT_LIST)
4309 /* We used to just abort on STATEMENT_LIST, but we can run into them
4310 with statement-expressions (c++/40975). */
4311 copy_statement_list (tp);
4312 else if (TREE_CODE_CLASS (code) == tcc_type)
4313 *walk_subtrees = 0;
4314 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4315 *walk_subtrees = 0;
4316 else if (TREE_CODE_CLASS (code) == tcc_constant)
4317 *walk_subtrees = 0;
4318 return NULL_TREE;
4319 }
4320
4321 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4322 information indicating to what new SAVE_EXPR this one should be mapped,
4323 use that one. Otherwise, create a new node and enter it in ST. FN is
4324 the function into which the copy will be placed. */
4325
4326 static void
4327 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4328 {
4329 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4330 tree *n;
4331 tree t;
4332
4333 /* See if we already encountered this SAVE_EXPR. */
4334 n = (tree *) pointer_map_contains (st, *tp);
4335
4336 /* If we didn't already remap this SAVE_EXPR, do so now. */
4337 if (!n)
4338 {
4339 t = copy_node (*tp);
4340
4341 /* Remember this SAVE_EXPR. */
4342 *pointer_map_insert (st, *tp) = t;
4343 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4344 *pointer_map_insert (st, t) = t;
4345 }
4346 else
4347 {
4348 /* We've already walked into this SAVE_EXPR; don't do it again. */
4349 *walk_subtrees = 0;
4350 t = *n;
4351 }
4352
4353 /* Replace this SAVE_EXPR with the copy. */
4354 *tp = t;
4355 }
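/* E.g. if the walked tree contains the same SAVE_EXPR <a_1 * b_2> twice, the
   first visit copies it and records the copy in ST; the second visit finds
   that entry and reuses the copy, so the single-evaluation semantics of the
   SAVE_EXPR is preserved in the duplicate.  */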
4356
4357 	/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local label,
4358 	   copies the declaration and enters it in the decl map in DATA (which is
4359 	   really a `copy_body_data *').  */
4360
4361 static tree
4362 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4363 void *data)
4364 {
4365 copy_body_data *id = (copy_body_data *) data;
4366
4367 /* Don't walk into types. */
4368 if (TYPE_P (*tp))
4369 *walk_subtrees = 0;
4370
4371 else if (TREE_CODE (*tp) == LABEL_EXPR)
4372 {
4373 tree decl = TREE_OPERAND (*tp, 0);
4374
4375 /* Copy the decl and remember the copy. */
4376 insert_decl_map (id, decl, id->copy_decl (decl, id));
4377 }
4378
4379 return NULL_TREE;
4380 }
4381
4382 /* Perform any modifications to EXPR required when it is unsaved. Does
4383 not recurse into EXPR's subtrees. */
4384
4385 static void
4386 unsave_expr_1 (tree expr)
4387 {
4388 switch (TREE_CODE (expr))
4389 {
4390 case TARGET_EXPR:
4391 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4392 It's OK for this to happen if it was part of a subtree that
4393 isn't immediately expanded, such as operand 2 of another
4394 TARGET_EXPR. */
4395 if (TREE_OPERAND (expr, 1))
4396 break;
4397
4398 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4399 TREE_OPERAND (expr, 3) = NULL_TREE;
4400 break;
4401
4402 default:
4403 break;
4404 }
4405 }
4406
4407 /* Called via walk_tree when an expression is unsaved. Using the
4408 	   decl map pointed to by ST (which is really a `struct pointer_map_t *'),
4409 remaps all local declarations to appropriate replacements. */
4410
4411 static tree
4412 unsave_r (tree *tp, int *walk_subtrees, void *data)
4413 {
4414 copy_body_data *id = (copy_body_data *) data;
4415 struct pointer_map_t *st = id->decl_map;
4416 tree *n;
4417
4418 /* Only a local declaration (variable or label). */
4419 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4420 || TREE_CODE (*tp) == LABEL_DECL)
4421 {
4422 /* Lookup the declaration. */
4423 n = (tree *) pointer_map_contains (st, *tp);
4424
4425 /* If it's there, remap it. */
4426 if (n)
4427 *tp = *n;
4428 }
4429
4430 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4431 gcc_unreachable ();
4432 else if (TREE_CODE (*tp) == BIND_EXPR)
4433 copy_bind_expr (tp, walk_subtrees, id);
4434 else if (TREE_CODE (*tp) == SAVE_EXPR
4435 || TREE_CODE (*tp) == TARGET_EXPR)
4436 remap_save_expr (tp, st, walk_subtrees);
4437 else
4438 {
4439 copy_tree_r (tp, walk_subtrees, NULL);
4440
4441 /* Do whatever unsaving is required. */
4442 unsave_expr_1 (*tp);
4443 }
4444
4445 /* Keep iterating. */
4446 return NULL_TREE;
4447 }
4448
4449 /* Copies everything in EXPR and replaces variables, labels
4450 and SAVE_EXPRs local to EXPR. */
4451
4452 tree
4453 unsave_expr_now (tree expr)
4454 {
4455 copy_body_data id;
4456
4457 /* There's nothing to do for NULL_TREE. */
4458 if (expr == 0)
4459 return expr;
4460
4461 /* Set up ID. */
4462 memset (&id, 0, sizeof (id));
4463 id.src_fn = current_function_decl;
4464 id.dst_fn = current_function_decl;
4465 id.decl_map = pointer_map_create ();
4466 id.debug_map = NULL;
4467
4468 id.copy_decl = copy_decl_no_change;
4469 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4470 id.transform_new_cfg = false;
4471 id.transform_return_to_modify = false;
4472 id.transform_lang_insert_block = NULL;
4473
4474 /* Walk the tree once to find local labels. */
4475 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4476
4477 /* Walk the tree again, copying, remapping, and unsaving. */
4478 walk_tree (&expr, unsave_r, &id, NULL);
4479
4480 /* Clean up. */
4481 pointer_map_destroy (id.decl_map);
4482 if (id.debug_map)
4483 pointer_map_destroy (id.debug_map);
4484
4485 return expr;
4486 }
4487
4488 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4489 	   label, copies the declaration and enters it in the decl map in DATA (which
4490 	   is really a 'copy_body_data *').  */
4491
4492 static tree
4493 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4494 bool *handled_ops_p ATTRIBUTE_UNUSED,
4495 struct walk_stmt_info *wi)
4496 {
4497 copy_body_data *id = (copy_body_data *) wi->info;
4498 gimple stmt = gsi_stmt (*gsip);
4499
4500 if (gimple_code (stmt) == GIMPLE_LABEL)
4501 {
4502 tree decl = gimple_label_label (stmt);
4503
4504 /* Copy the decl and remember the copy. */
4505 insert_decl_map (id, decl, id->copy_decl (decl, id));
4506 }
4507
4508 return NULL_TREE;
4509 }
4510
4511
4512 	/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4513 	   Using the decl map in the copy_body_data passed through WI->info,
4514 remaps all local declarations to appropriate replacements in gimple
4515 operands. */
4516
4517 static tree
4518 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4519 {
4520 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4521 copy_body_data *id = (copy_body_data *) wi->info;
4522 struct pointer_map_t *st = id->decl_map;
4523 tree *n;
4524 tree expr = *tp;
4525
4526 /* Only a local declaration (variable or label). */
4527 if ((TREE_CODE (expr) == VAR_DECL
4528 && !TREE_STATIC (expr))
4529 || TREE_CODE (expr) == LABEL_DECL)
4530 {
4531 /* Lookup the declaration. */
4532 n = (tree *) pointer_map_contains (st, expr);
4533
4534 /* If it's there, remap it. */
4535 if (n)
4536 *tp = *n;
4537 *walk_subtrees = 0;
4538 }
4539 else if (TREE_CODE (expr) == STATEMENT_LIST
4540 || TREE_CODE (expr) == BIND_EXPR
4541 || TREE_CODE (expr) == SAVE_EXPR)
4542 gcc_unreachable ();
4543 else if (TREE_CODE (expr) == TARGET_EXPR)
4544 {
4545 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4546 It's OK for this to happen if it was part of a subtree that
4547 isn't immediately expanded, such as operand 2 of another
4548 TARGET_EXPR. */
4549 if (!TREE_OPERAND (expr, 1))
4550 {
4551 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4552 TREE_OPERAND (expr, 3) = NULL_TREE;
4553 }
4554 }
4555
4556 /* Keep iterating. */
4557 return NULL_TREE;
4558 }
4559
4560
4561 	/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4562 	   Using the decl map in the copy_body_data passed through WI->info,
4563 remaps all local declarations to appropriate replacements in gimple
4564 statements. */
4565
4566 static tree
4567 replace_locals_stmt (gimple_stmt_iterator *gsip,
4568 bool *handled_ops_p ATTRIBUTE_UNUSED,
4569 struct walk_stmt_info *wi)
4570 {
4571 copy_body_data *id = (copy_body_data *) wi->info;
4572 gimple stmt = gsi_stmt (*gsip);
4573
4574 if (gimple_code (stmt) == GIMPLE_BIND)
4575 {
4576 tree block = gimple_bind_block (stmt);
4577
4578 if (block)
4579 {
4580 remap_block (&block, id);
4581 gimple_bind_set_block (stmt, block);
4582 }
4583
4584 /* This will remap a lot of the same decls again, but this should be
4585 harmless. */
4586 if (gimple_bind_vars (stmt))
4587 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4588 }
4589
4590 /* Keep iterating. */
4591 return NULL_TREE;
4592 }
4593
4594
4595 /* Copies everything in SEQ and replaces variables and labels local to
4596 current_function_decl. */
4597
4598 gimple_seq
4599 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4600 {
4601 copy_body_data id;
4602 struct walk_stmt_info wi;
4603 struct pointer_set_t *visited;
4604 gimple_seq copy;
4605
4606 	  /* There's nothing to do for an empty sequence.  */
4607 if (seq == NULL)
4608 return seq;
4609
4610 /* Set up ID. */
4611 memset (&id, 0, sizeof (id));
4612 id.src_fn = current_function_decl;
4613 id.dst_fn = current_function_decl;
4614 id.decl_map = pointer_map_create ();
4615 id.debug_map = NULL;
4616
4617 id.copy_decl = copy_decl_no_change;
4618 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4619 id.transform_new_cfg = false;
4620 id.transform_return_to_modify = false;
4621 id.transform_lang_insert_block = NULL;
4622
4623 /* Walk the tree once to find local labels. */
4624 memset (&wi, 0, sizeof (wi));
4625 visited = pointer_set_create ();
4626 wi.info = &id;
4627 wi.pset = visited;
4628 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4629 pointer_set_destroy (visited);
4630
4631 copy = gimple_seq_copy (seq);
4632
4633 /* Walk the copy, remapping decls. */
4634 memset (&wi, 0, sizeof (wi));
4635 wi.info = &id;
4636 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4637
4638 /* Clean up. */
4639 pointer_map_destroy (id.decl_map);
4640 if (id.debug_map)
4641 pointer_map_destroy (id.debug_map);
4642
4643 return copy;
4644 }
4645
4646
4647 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4648
4649 static tree
4650 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4651 {
4652 if (*tp == data)
4653 return (tree) data;
4654 else
4655 return NULL;
4656 }
4657
4658 DEBUG_FUNCTION bool
4659 debug_find_tree (tree top, tree search)
4660 {
4661 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4662 }
4663
4664
4665 /* Declare the variables created by the inliner. Add all the variables in
4666 	   VARS to BLOCK.  */
4667
4668 static void
4669 declare_inline_vars (tree block, tree vars)
4670 {
4671 tree t;
4672 for (t = vars; t; t = DECL_CHAIN (t))
4673 {
4674 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4675 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4676 add_local_decl (cfun, t);
4677 }
4678
4679 if (block)
4680 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4681 }
4682
4683 	/* Finish preparing COPY, a copy of DECL.  The DECL originally was in
4684 	   ID->src_fn, but now it will be in ID->dst_fn; a PARM_DECL may have
4685 	   been turned into a VAR_DECL by the caller.  */
4686
4687 static tree
4688 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4689 {
4690 /* Don't generate debug information for the copy if we wouldn't have
4691 	     generated it for the original either.  */
4692 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4693 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4694
4695 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4696 declaration inspired this copy. */
4697 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4698
4699 /* The new variable/label has no RTL, yet. */
4700 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4701 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4702 SET_DECL_RTL (copy, 0);
4703
4704 /* These args would always appear unused, if not for this. */
4705 TREE_USED (copy) = 1;
4706
4707 /* Set the context for the new declaration. */
4708 if (!DECL_CONTEXT (decl))
4709 /* Globals stay global. */
4710 ;
4711 else if (DECL_CONTEXT (decl) != id->src_fn)
4712 /* Things that weren't in the scope of the function we're inlining
4713 from aren't in the scope we're inlining to, either. */
4714 ;
4715 else if (TREE_STATIC (decl))
4716 /* Function-scoped static variables should stay in the original
4717 function. */
4718 ;
4719 else
4720 /* Ordinary automatic local variables are now in the scope of the
4721 new function. */
4722 DECL_CONTEXT (copy) = id->dst_fn;
4723
4724 return copy;
4725 }
4726
4727 static tree
4728 copy_decl_to_var (tree decl, copy_body_data *id)
4729 {
4730 tree copy, type;
4731
4732 gcc_assert (TREE_CODE (decl) == PARM_DECL
4733 || TREE_CODE (decl) == RESULT_DECL);
4734
4735 type = TREE_TYPE (decl);
4736
4737 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4738 VAR_DECL, DECL_NAME (decl), type);
4739 if (DECL_PT_UID_SET_P (decl))
4740 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4741 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4742 TREE_READONLY (copy) = TREE_READONLY (decl);
4743 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4744 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4745
4746 return copy_decl_for_dup_finish (id, decl, copy);
4747 }
4748
4749 /* Like copy_decl_to_var, but create a return slot object instead of a
4750 pointer variable for return by invisible reference. */
4751
4752 static tree
4753 copy_result_decl_to_var (tree decl, copy_body_data *id)
4754 {
4755 tree copy, type;
4756
4757 gcc_assert (TREE_CODE (decl) == PARM_DECL
4758 || TREE_CODE (decl) == RESULT_DECL);
4759
4760 type = TREE_TYPE (decl);
4761 if (DECL_BY_REFERENCE (decl))
4762 type = TREE_TYPE (type);
4763
4764 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4765 VAR_DECL, DECL_NAME (decl), type);
4766 if (DECL_PT_UID_SET_P (decl))
4767 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
4768 TREE_READONLY (copy) = TREE_READONLY (decl);
4769 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4770 if (!DECL_BY_REFERENCE (decl))
4771 {
4772 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4773 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4774 }
4775
4776 return copy_decl_for_dup_finish (id, decl, copy);
4777 }
4778
4779 tree
4780 copy_decl_no_change (tree decl, copy_body_data *id)
4781 {
4782 tree copy;
4783
4784 copy = copy_node (decl);
4785
4786 /* The COPY is not abstract; it will be generated in DST_FN. */
4787 DECL_ABSTRACT (copy) = 0;
4788 lang_hooks.dup_lang_specific_decl (copy);
4789
4790 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4791 been taken; it's for internal bookkeeping in expand_goto_internal. */
4792 if (TREE_CODE (copy) == LABEL_DECL)
4793 {
4794 TREE_ADDRESSABLE (copy) = 0;
4795 LABEL_DECL_UID (copy) = -1;
4796 }
4797
4798 return copy_decl_for_dup_finish (id, decl, copy);
4799 }
4800
4801 static tree
4802 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4803 {
4804 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4805 return copy_decl_to_var (decl, id);
4806 else
4807 return copy_decl_no_change (decl, id);
4808 }
4809
4810 /* Return a copy of the function's argument tree. */
4811 static tree
4812 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4813 bitmap args_to_skip, tree *vars)
4814 {
4815 tree arg, *parg;
4816 tree new_parm = NULL;
4817 int i = 0;
4818
4819 parg = &new_parm;
4820
4821 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
4822 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4823 {
4824 tree new_tree = remap_decl (arg, id);
4825 if (TREE_CODE (new_tree) != PARM_DECL)
4826 new_tree = id->copy_decl (arg, id);
4827 lang_hooks.dup_lang_specific_decl (new_tree);
4828 *parg = new_tree;
4829 parg = &DECL_CHAIN (new_tree);
4830 }
4831 else if (!pointer_map_contains (id->decl_map, arg))
4832 {
4833 /* Make an equivalent VAR_DECL. If the argument was used
4834 	           as a temporary variable later in the function, the uses will be
4835 	           replaced by the local variable.  */
4836 tree var = copy_decl_to_var (arg, id);
4837 insert_decl_map (id, arg, var);
4838 /* Declare this new variable. */
4839 DECL_CHAIN (var) = *vars;
4840 *vars = var;
4841 }
4842 return new_parm;
4843 }
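/* An illustrative sketch: for "int f (int a, int b, int c)" with
   ARGS_TO_SKIP = { 1 }, the returned parameter list is "a, c"; "b" is
   instead given an equivalent VAR_DECL, which is chained onto *VARS and
   recorded in the decl map so remaining uses of the skipped parameter are
   redirected to it.  */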
4844
4845 /* Return a copy of the function's static chain. */
4846 static tree
4847 copy_static_chain (tree static_chain, copy_body_data * id)
4848 {
4849 tree *chain_copy, *pvar;
4850
4851 chain_copy = &static_chain;
4852 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
4853 {
4854 tree new_tree = remap_decl (*pvar, id);
4855 lang_hooks.dup_lang_specific_decl (new_tree);
4856 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
4857 *pvar = new_tree;
4858 }
4859 return static_chain;
4860 }
4861
4862 /* Return true if the function is allowed to be versioned.
4863 This is a guard for the versioning functionality. */
4864
4865 bool
4866 tree_versionable_function_p (tree fndecl)
4867 {
4868 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4869 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4870 }
4871
4872 /* Delete all unreachable basic blocks and update callgraph.
4873 Doing so is somewhat nontrivial because we need to update all clones and
4874 	   remove inline functions that become unreachable.  */
4875
4876 static bool
4877 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4878 {
4879 bool changed = false;
4880 basic_block b, next_bb;
4881
4882 find_unreachable_blocks ();
4883
4884 /* Delete all unreachable basic blocks. */
4885
4886 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4887 {
4888 next_bb = b->next_bb;
4889
4890 if (!(b->flags & BB_REACHABLE))
4891 {
4892 gimple_stmt_iterator bsi;
4893
4894 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4895 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4896 {
4897 struct cgraph_edge *e;
4898 struct cgraph_node *node;
4899
4900 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4901 {
4902 if (!e->inline_failed)
4903 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
4904 else
4905 cgraph_remove_edge (e);
4906 }
4907 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4908 && id->dst_node->clones)
4909 for (node = id->dst_node->clones; node != id->dst_node;)
4910 {
4911 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4912 {
4913 if (!e->inline_failed)
4914 cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
4915 else
4916 cgraph_remove_edge (e);
4917 }
4918
4919 if (node->clones)
4920 node = node->clones;
4921 else if (node->next_sibling_clone)
4922 node = node->next_sibling_clone;
4923 else
4924 {
4925 while (node != id->dst_node && !node->next_sibling_clone)
4926 node = node->clone_of;
4927 if (node != id->dst_node)
4928 node = node->next_sibling_clone;
4929 }
4930 }
4931 }
4932 delete_basic_block (b);
4933 changed = true;
4934 }
4935 }
4936
4937 return changed;
4938 }
4939
4940 /* Update clone info after duplication. */
4941
4942 static void
4943 update_clone_info (copy_body_data * id)
4944 {
4945 struct cgraph_node *node;
4946 if (!id->dst_node->clones)
4947 return;
4948 for (node = id->dst_node->clones; node != id->dst_node;)
4949 {
4950 /* First update replace maps to match the new body. */
4951 if (node->clone.tree_map)
4952 {
4953 unsigned int i;
4954 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4955 {
4956 struct ipa_replace_map *replace_info;
4957 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4958 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4959 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4960 }
4961 }
4962 if (node->clones)
4963 node = node->clones;
4964 else if (node->next_sibling_clone)
4965 node = node->next_sibling_clone;
4966 else
4967 {
4968 while (node != id->dst_node && !node->next_sibling_clone)
4969 node = node->clone_of;
4970 if (node != id->dst_node)
4971 node = node->next_sibling_clone;
4972 }
4973 }
4974 }
4975
4976 /* Create a copy of a function's tree.
4977 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4978 of the original function and the new copied function
4979 respectively. In case we want to replace a DECL
4980 tree with another tree while duplicating the function's
4981 body, TREE_MAP represents the mapping between these
4982 trees. If UPDATE_CLONES is set, the call_stmt fields
4983 of edges of clones of the function will be updated.
4984
4985 	   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
4986 	   from the new version.
4987 	   If SKIP_RETURN is true, the new version will return void.
4988 	   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
4989 	   If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
4990 */
4991 void
4992 tree_function_versioning (tree old_decl, tree new_decl,
4993 VEC(ipa_replace_map_p,gc)* tree_map,
4994 bool update_clones, bitmap args_to_skip,
4995 bool skip_return, bitmap blocks_to_copy,
4996 basic_block new_entry)
4997 {
4998 struct cgraph_node *old_version_node;
4999 struct cgraph_node *new_version_node;
5000 copy_body_data id;
5001 tree p;
5002 unsigned i;
5003 struct ipa_replace_map *replace_info;
5004 basic_block old_entry_block, bb;
5005 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
5006
5007 tree old_current_function_decl = current_function_decl;
5008 tree vars = NULL_TREE;
5009
5010 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5011 && TREE_CODE (new_decl) == FUNCTION_DECL);
5012 DECL_POSSIBLY_INLINED (old_decl) = 1;
5013
5014 old_version_node = cgraph_get_node (old_decl);
5015 gcc_checking_assert (old_version_node);
5016 new_version_node = cgraph_get_node (new_decl);
5017 gcc_checking_assert (new_version_node);
5018
5019 /* Copy over debug args. */
5020 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5021 {
5022 VEC(tree, gc) **new_debug_args, **old_debug_args;
5023 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5024 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5025 old_debug_args = decl_debug_args_lookup (old_decl);
5026 if (old_debug_args)
5027 {
5028 new_debug_args = decl_debug_args_insert (new_decl);
5029 *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
5030 }
5031 }
5032
5033 /* Output the inlining info for this abstract function, since it has been
5034 inlined. If we don't do this now, we can lose the information about the
5035 variables in the function when the blocks get blown away as soon as we
5036 remove the cgraph node. */
5037 (*debug_hooks->outlining_inline_function) (old_decl);
5038
5039 DECL_ARTIFICIAL (new_decl) = 1;
5040 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5041 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5042
5043 /* Prepare the data structures for the tree copy. */
5044 memset (&id, 0, sizeof (id));
5045
5046 /* Generate a new name for the new version. */
5047 id.statements_to_fold = pointer_set_create ();
5048
5049 id.decl_map = pointer_map_create ();
5050 id.debug_map = NULL;
5051 id.src_fn = old_decl;
5052 id.dst_fn = new_decl;
5053 id.src_node = old_version_node;
5054 id.dst_node = new_version_node;
5055 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5056 if (id.src_node->ipa_transforms_to_apply)
5057 {
5058 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
5059 unsigned int i;
5060
5061 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
5062 id.src_node->ipa_transforms_to_apply);
5063 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
5064 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
5065 VEC_index (ipa_opt_pass,
5066 old_transforms_to_apply,
5067 i));
5068 }
5069
5070 id.copy_decl = copy_decl_no_change;
5071 id.transform_call_graph_edges
5072 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5073 id.transform_new_cfg = true;
5074 id.transform_return_to_modify = false;
5075 id.transform_lang_insert_block = NULL;
5076
5077 current_function_decl = new_decl;
5078 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
5079 (DECL_STRUCT_FUNCTION (old_decl));
5080 initialize_cfun (new_decl, old_decl,
5081 old_entry_block->count);
5082 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5083 = id.src_cfun->gimple_df->ipa_pta;
5084 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
5085
5086 /* Copy the function's static chain. */
5087 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5088 if (p)
5089 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
5090 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
5091 &id);
5092
5093 /* If there's a tree_map, prepare for substitution. */
5094 if (tree_map)
5095 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
5096 {
5097 gimple init;
5098 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
5099 if (replace_info->replace_p)
5100 {
5101 tree op = replace_info->new_tree;
5102 if (!replace_info->old_tree)
5103 {
5104 int i = replace_info->parm_num;
5105 tree parm;
5106 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5107 i --;
5108 replace_info->old_tree = parm;
5109 }
5110
5111
5112 STRIP_NOPS (op);
5113
5114 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
5115 op = TREE_OPERAND (op, 0);
5116
5117 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5118 init = setup_one_parameter (&id, replace_info->old_tree,
5119 replace_info->new_tree, id.src_fn,
5120 NULL,
5121 &vars);
5122 if (init)
5123 VEC_safe_push (gimple, heap, init_stmts, init);
5124 }
5125 }
5126 /* Copy the function's arguments. */
5127 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5128 DECL_ARGUMENTS (new_decl) =
5129 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5130 args_to_skip, &vars);
5131
5132 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5133 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5134
5135 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5136
5137 if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5138 /* Add local vars. */
5139 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5140
5141 if (DECL_RESULT (old_decl) == NULL_TREE)
5142 ;
5143 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5144 {
5145 DECL_RESULT (new_decl)
5146 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5147 RESULT_DECL, NULL_TREE, void_type_node);
5148 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5149 cfun->returns_struct = 0;
5150 cfun->returns_pcc_struct = 0;
5151 }
5152 else
5153 {
5154 tree old_name;
5155 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5156 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5157 if (gimple_in_ssa_p (id.src_cfun)
5158 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5159 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5160 {
5161 tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
5162 insert_decl_map (&id, old_name, new_name);
5163 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5164 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5165 }
5166 }
5167
5168 	  /* Copy the function's body.  */
5169 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5170 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
5171
5172 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5173 number_blocks (new_decl);
5174
5175 /* We want to create the BB unconditionally, so that the addition of
5176 debug stmts doesn't affect BB count, which may in the end cause
5177 codegen differences. */
5178 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
5179 while (VEC_length (gimple, init_stmts))
5180 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
5181 update_clone_info (&id);
5182
5183 /* Remap the nonlocal_goto_save_area, if any. */
5184 if (cfun->nonlocal_goto_save_area)
5185 {
5186 struct walk_stmt_info wi;
5187
5188 memset (&wi, 0, sizeof (wi));
5189 wi.info = &id;
5190 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5191 }
5192
5193 /* Clean up. */
5194 pointer_map_destroy (id.decl_map);
5195 if (id.debug_map)
5196 pointer_map_destroy (id.debug_map);
5197 free_dominance_info (CDI_DOMINATORS);
5198 free_dominance_info (CDI_POST_DOMINATORS);
5199
5200 fold_marked_statements (0, id.statements_to_fold);
5201 pointer_set_destroy (id.statements_to_fold);
5202 fold_cond_expr_cond ();
5203 delete_unreachable_blocks_update_callgraph (&id);
5204 if (id.dst_node->analyzed)
5205 cgraph_rebuild_references ();
5206 update_ssa (TODO_update_ssa);
5207
5208 /* After partial cloning we need to rescale frequencies, so they are
5209 within proper range in the cloned function. */
5210 if (new_entry)
5211 {
5212 struct cgraph_edge *e;
5213 rebuild_frequencies ();
5214
5215 new_version_node->count = ENTRY_BLOCK_PTR->count;
5216 for (e = new_version_node->callees; e; e = e->next_callee)
5217 {
5218 basic_block bb = gimple_bb (e->call_stmt);
5219 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5220 bb);
5221 e->count = bb->count;
5222 }
5223 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5224 {
5225 basic_block bb = gimple_bb (e->call_stmt);
5226 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5227 bb);
5228 e->count = bb->count;
5229 }
5230 }
5231
5232 free_dominance_info (CDI_DOMINATORS);
5233 free_dominance_info (CDI_POST_DOMINATORS);
5234
5235 gcc_assert (!id.debug_stmts);
5236 VEC_free (gimple, heap, init_stmts);
5237 pop_cfun ();
5238 current_function_decl = old_current_function_decl;
5239 gcc_assert (!current_function_decl
5240 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
5241 return;
5242 }
5243
5244 	/* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
5245 the callee and return the inlined body on success. */
5246
5247 tree
5248 maybe_inline_call_in_expr (tree exp)
5249 {
5250 tree fn = get_callee_fndecl (exp);
5251
5252 /* We can only try to inline "const" functions. */
5253 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5254 {
5255 struct pointer_map_t *decl_map = pointer_map_create ();
5256 call_expr_arg_iterator iter;
5257 copy_body_data id;
5258 tree param, arg, t;
5259
5260 /* Remap the parameters. */
5261 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5262 param;
5263 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5264 *pointer_map_insert (decl_map, param) = arg;
5265
5266 memset (&id, 0, sizeof (id));
5267 id.src_fn = fn;
5268 id.dst_fn = current_function_decl;
5269 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5270 id.decl_map = decl_map;
5271
5272 id.copy_decl = copy_decl_no_change;
5273 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5274 id.transform_new_cfg = false;
5275 id.transform_return_to_modify = true;
5276 id.transform_lang_insert_block = NULL;
5277
5278 /* Make sure not to unshare trees behind the front-end's back
5279 since front-end specific mechanisms may rely on sharing. */
5280 id.regimplify = false;
5281 id.do_not_unshare = true;
5282
5283 /* We're not inside any EH region. */
5284 id.eh_lp_nr = 0;
5285
5286 t = copy_tree_body (&id);
5287 pointer_map_destroy (decl_map);
5288
5289 /* We can only return something suitable for use in a GENERIC
5290 expression tree. */
5291 if (TREE_CODE (t) == MODIFY_EXPR)
5292 return TREE_OPERAND (t, 1);
5293 }
5294
5295 return NULL_TREE;
5296 }
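/* For example (a sketch), given a const function

     static int sq (int x) __attribute__ ((const));
     static int sq (int x) { return x * x; }

   a GENERIC call tree for "sq (3)" may be replaced by the remapped body
   "3 * 3": with transform_return_to_modify set, copy_tree_body yields a
   MODIFY_EXPR whose right-hand side is the inlined value, and that operand
   is what gets returned above.  */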
5297
5298 /* Duplicate a type, fields and all. */
5299
5300 tree
5301 build_duplicate_type (tree type)
5302 {
5303 struct copy_body_data id;
5304
5305 memset (&id, 0, sizeof (id));
5306 id.src_fn = current_function_decl;
5307 id.dst_fn = current_function_decl;
5308 id.src_cfun = cfun;
5309 id.decl_map = pointer_map_create ();
5310 id.debug_map = NULL;
5311 id.copy_decl = copy_decl_no_change;
5312
5313 type = remap_type_1 (type, &id);
5314
5315 pointer_map_destroy (id.decl_map);
5316 if (id.debug_map)
5317 pointer_map_destroy (id.debug_map);
5318
5319 TYPE_CANONICAL (type) = type;
5320
5321 return type;
5322 }