1 /* Tree inlining.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "tree.h"
28 #include "tree-inline.h"
29 #include "rtl.h"
30 #include "expr.h"
31 #include "flags.h"
32 #include "params.h"
33 #include "input.h"
34 #include "insn-config.h"
35 #include "hashtab.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "intl.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
43 #include "function.h"
44 #include "ggc.h"
45 #include "tree-flow.h"
46 #include "diagnostic.h"
47 #include "except.h"
48 #include "debug.h"
49 #include "pointer-set.h"
50 #include "ipa-prop.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
53 #include "target.h"
54 #include "integrate.h"
55
56 /* I'm not real happy about this, but we need to handle gimple and
57 non-gimple trees. */
58 #include "gimple.h"
59
60 /* Inlining, Cloning, Versioning, Parallelization
61
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX statements are adjusted accordingly.
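
   For instance (an illustrative sketch only, not taken from any real
   dump): inlining
     int f (int x) { return x + 1; }
   at a call site  y = f (3);  produces roughly
     x.0 = 3;
     retval.1 = x.0 + 1;
     y = retval.1;
   where x.0 is the VAR_DECL created for the PARM_DECL x and retval.1
   is the dedicated returned-value variable mentioned above.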
68
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
73
74 Versioning: a function body is duplicated and the result is a new
75 function, rather than being inserted into the blocks of an existing
76 function as with inlining. Some parameters will become constants.
77
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
81
82 All of these will simultaneously look up any callgraph edges. If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined), those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
89
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
91
92 /* To Do:
93
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they
99 are not needed.
100
101 o Provide heuristics to clamp inlining of recursive template
102 calls? */
103
104
105 /* Weights that estimate_num_insns uses for heuristics in inlining. */
106
107 eni_weights eni_inlining_weights;
108
109 /* Weights that estimate_num_insns uses to estimate the size of the
110 produced code. */
111
112 eni_weights eni_size_weights;
113
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
116
117 eni_weights eni_time_weights;
118
119 /* Prototypes. */
120
121 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static tree mark_local_for_remap_r (tree *, int *, void *);
125 static void unsave_expr_1 (tree);
126 static tree unsave_r (tree *, int *, void *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, void *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple remap_gimple_stmt (gimple, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
135
136 /* Insert a tree->tree mapping for ID. Although the name suggests
137 that the trees should be variables, it is used for more than that. */
138
139 void
140 insert_decl_map (copy_body_data *id, tree key, tree value)
141 {
142 *pointer_map_insert (id->decl_map, key) = value;
143
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
146 if (key != value)
147 *pointer_map_insert (id->decl_map, value) = value;
148 }
149
150 /* Insert a tree->tree mapping for ID. This is only used for
151 variables. */
152
153 static void
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
155 {
156 if (!gimple_in_ssa_p (id->src_cfun))
157 return;
158
159 if (!MAY_HAVE_DEBUG_STMTS)
160 return;
161
162 if (!target_for_debug_bind (key))
163 return;
164
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
167
168 if (!id->debug_map)
169 id->debug_map = pointer_map_create ();
170
171 *pointer_map_insert (id->debug_map, key) = value;
172 }
173
174 /* Construct new SSA name for old NAME. ID is the inline context. */
175
176 static tree
177 remap_ssa_name (tree name, copy_body_data *id)
178 {
179 tree new_tree;
180 tree *n;
181
182 gcc_assert (TREE_CODE (name) == SSA_NAME);
183
184 n = (tree *) pointer_map_contains (id->decl_map, name);
185 if (n)
186 return unshare_expr (*n);
187
188 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
189 in copy_bb. */
190 new_tree = remap_decl (SSA_NAME_VAR (name), id);
191
192 /* We might've substituted a constant or another SSA_NAME for
193 the variable.
194
195 Replace the SSA name representing the RESULT_DECL by the variable during
196 inlining: this saves us from needing to introduce a PHI node in the case
197 the return value is only partly initialized. */
198 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
199 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
200 || !id->transform_return_to_modify))
201 {
202 new_tree = make_ssa_name (new_tree, NULL);
203 insert_decl_map (id, name, new_tree);
204 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
205 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
206 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
207 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
208 {
209 /* By inlining a function having an uninitialized variable, we might
210 extend its lifetime (the variable might get reused). This causes an
211 ICE in the case we end up extending the lifetime of an SSA name across
212 an abnormal edge, and it also increases register pressure.
213
214 We simply initialize all uninitialized vars by 0, except
215 for the case we are inlining to the very first BB. We could avoid
216 this for all BBs that are not inside strongly connected
217 regions of the CFG, but this is expensive to test. */
218 if (id->entry_bb
219 && is_gimple_reg (SSA_NAME_VAR (name))
220 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
221 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
222 || EDGE_COUNT (id->entry_bb->preds) != 1))
223 {
224 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
225 gimple init_stmt;
226
227 init_stmt = gimple_build_assign (new_tree,
228 fold_convert (TREE_TYPE (new_tree),
229 integer_zero_node));
230 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
231 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
232 }
233 else
234 {
235 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
236 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
237 == name)
238 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
239 }
240 }
241 }
242 else
243 insert_decl_map (id, name, new_tree);
244 return new_tree;
245 }
246
247 /* If nonzero, we're remapping the contents of inlined debug
248 statements. If negative, an error has occurred, such as a
249 reference to a variable that isn't available in the inlined
250 context. */
251 int processing_debug_stmt = 0;
252
253 /* Remap DECL during the copying of the BLOCK tree for the function. */
254
255 tree
256 remap_decl (tree decl, copy_body_data *id)
257 {
258 tree *n;
259 tree fn;
260
261 /* We only remap local variables in the current function. */
262 fn = id->src_fn;
263
264 /* See if we have remapped this declaration. */
265
266 n = (tree *) pointer_map_contains (id->decl_map, decl);
267
268 if (!n && processing_debug_stmt)
269 {
270 processing_debug_stmt = -1;
271 return decl;
272 }
273
274 /* If we didn't already have an equivalent for this declaration,
275 create one now. */
276 if (!n)
277 {
278 /* Make a copy of the variable or label. */
279 tree t = id->copy_decl (decl, id);
280
281 /* Remember it, so that if we encounter this local entity again
282 we can reuse this copy. Do this early because remap_type may
283 need this decl for TYPE_STUB_DECL. */
284 insert_decl_map (id, decl, t);
285
286 if (!DECL_P (t))
287 return t;
288
289 /* Remap types, if necessary. */
290 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
291 if (TREE_CODE (t) == TYPE_DECL)
292 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
293
294 /* Remap sizes as necessary. */
295 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
296 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
297
298 /* If fields, do likewise for offset and qualifier. */
299 if (TREE_CODE (t) == FIELD_DECL)
300 {
301 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
302 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
303 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
304 }
305
306 if (cfun && gimple_in_ssa_p (cfun)
307 && (TREE_CODE (t) == VAR_DECL
308 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
309 {
310 tree def = gimple_default_def (id->src_cfun, decl);
311 get_var_ann (t);
312 if (TREE_CODE (decl) != PARM_DECL && def)
313 {
314 tree map = remap_ssa_name (def, id);
315 /* Watch out for RESULT_DECLs whose SSA names map directly
316 to them. */
317 if (TREE_CODE (map) == SSA_NAME
318 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
319 set_default_def (t, map);
320 }
321 add_referenced_var (t);
322 }
323 return t;
324 }
325
326 if (id->do_not_unshare)
327 return *n;
328 else
329 return unshare_expr (*n);
330 }
331
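/* Helper for remap_type.  TYPE is known to need remapping (it is
   variably modified); build a remapped copy of it, register the copy
   in ID->decl_map, and return it.  */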
332 static tree
333 remap_type_1 (tree type, copy_body_data *id)
334 {
335 tree new_tree, t;
336
337 /* We do need a copy; build and register it now. If this is a pointer or
338 reference type, remap the designated type and make a new pointer or
339 reference type. */
340 if (TREE_CODE (type) == POINTER_TYPE)
341 {
342 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
343 TYPE_MODE (type),
344 TYPE_REF_CAN_ALIAS_ALL (type));
345 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
346 new_tree = build_type_attribute_qual_variant (new_tree,
347 TYPE_ATTRIBUTES (type),
348 TYPE_QUALS (type));
349 insert_decl_map (id, type, new_tree);
350 return new_tree;
351 }
352 else if (TREE_CODE (type) == REFERENCE_TYPE)
353 {
354 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
355 TYPE_MODE (type),
356 TYPE_REF_CAN_ALIAS_ALL (type));
357 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
358 new_tree = build_type_attribute_qual_variant (new_tree,
359 TYPE_ATTRIBUTES (type),
360 TYPE_QUALS (type));
361 insert_decl_map (id, type, new_tree);
362 return new_tree;
363 }
364 else
365 new_tree = copy_node (type);
366
367 insert_decl_map (id, type, new_tree);
368
369 /* This is a new type, not a copy of an old type. Need to reassociate
370 variants. We can handle everything except the main variant lazily. */
371 t = TYPE_MAIN_VARIANT (type);
372 if (type != t)
373 {
374 t = remap_type (t, id);
375 TYPE_MAIN_VARIANT (new_tree) = t;
376 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
377 TYPE_NEXT_VARIANT (t) = new_tree;
378 }
379 else
380 {
381 TYPE_MAIN_VARIANT (new_tree) = new_tree;
382 TYPE_NEXT_VARIANT (new_tree) = NULL;
383 }
384
385 if (TYPE_STUB_DECL (type))
386 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
387
388 /* Lazily create pointer and reference types. */
389 TYPE_POINTER_TO (new_tree) = NULL;
390 TYPE_REFERENCE_TO (new_tree) = NULL;
391
392 switch (TREE_CODE (new_tree))
393 {
394 case INTEGER_TYPE:
395 case REAL_TYPE:
396 case FIXED_POINT_TYPE:
397 case ENUMERAL_TYPE:
398 case BOOLEAN_TYPE:
399 t = TYPE_MIN_VALUE (new_tree);
400 if (t && TREE_CODE (t) != INTEGER_CST)
401 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
402
403 t = TYPE_MAX_VALUE (new_tree);
404 if (t && TREE_CODE (t) != INTEGER_CST)
405 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
406 return new_tree;
407
408 case FUNCTION_TYPE:
409 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
410 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
411 return new_tree;
412
413 case ARRAY_TYPE:
414 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
415 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
416 break;
417
418 case RECORD_TYPE:
419 case UNION_TYPE:
420 case QUAL_UNION_TYPE:
421 {
422 tree f, nf = NULL;
423
424 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
425 {
426 t = remap_decl (f, id);
427 DECL_CONTEXT (t) = new_tree;
428 TREE_CHAIN (t) = nf;
429 nf = t;
430 }
431 TYPE_FIELDS (new_tree) = nreverse (nf);
432 }
433 break;
434
435 case OFFSET_TYPE:
436 default:
437 /* Shouldn't have been thought variable sized. */
438 gcc_unreachable ();
439 }
440
441 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
442 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
443
444 return new_tree;
445 }
446
447 tree
448 remap_type (tree type, copy_body_data *id)
449 {
450 tree *node;
451 tree tmp;
452
453 if (type == NULL)
454 return type;
455
456 /* See if we have remapped this type. */
457 node = (tree *) pointer_map_contains (id->decl_map, type);
458 if (node)
459 return *node;
460
461 /* The type only needs remapping if it's variably modified. */
462 if (! variably_modified_type_p (type, id->src_fn))
463 {
464 insert_decl_map (id, type, type);
465 return type;
466 }
467
468 id->remapping_type_depth++;
469 tmp = remap_type_1 (type, id);
470 id->remapping_type_depth--;
471
472 return tmp;
473 }
474
475 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
476 is NULL or TYPE has not been remapped before. */
477
478 static tree
479 remapped_type (tree type, copy_body_data *id)
480 {
481 tree *node;
482
483 if (type == NULL)
484 return type;
485
486 /* See if we have remapped this type. */
487 node = (tree *) pointer_map_contains (id->decl_map, type);
488 if (node)
489 return *node;
490 else
491 return NULL;
492 }
493
495 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
496
497 static bool
498 can_be_nonlocal (tree decl, copy_body_data *id)
499 {
500 /* We can not duplicate function decls. */
501 if (TREE_CODE (decl) == FUNCTION_DECL)
502 return true;
503
504 /* Local static vars must be non-local or we get multiple declaration
505 problems. */
506 if (TREE_CODE (decl) == VAR_DECL
507 && !auto_var_in_fn_p (decl, id->src_fn))
508 return true;
509
510 /* At the moment dwarf2out can handle only these types of nodes. We
511 can support more later. */
512 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
513 return false;
514
515 /* We must use global type. We call remapped_type instead of
516 remap_type since we don't want to remap this type here if it
517 hasn't been remapped before. */
518 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
519 return false;
520
521 /* Without SSA we can't tell if a variable is used. */
522 if (!gimple_in_ssa_p (cfun))
523 return false;
524
525 /* Live variables must be copied so we can attach DECL_RTL. */
526 if (var_ann (decl))
527 return false;
528
529 return true;
530 }
531
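/* Remap the chain of declarations DECLS using ID and return the new chain.
   Declarations that are kept non-local (see can_be_nonlocal) or that do
   not get remapped may instead be recorded in *NONLOCALIZED_LIST for
   debug info purposes.  */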
532 static tree
533 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
534 {
535 tree old_var;
536 tree new_decls = NULL_TREE;
537
538 /* Remap its variables. */
539 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
540 {
541 tree new_var;
542 tree origin_var = DECL_ORIGIN (old_var);
543
544 if (can_be_nonlocal (old_var, id))
545 {
546 if (TREE_CODE (old_var) == VAR_DECL
547 && ! DECL_EXTERNAL (old_var)
548 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
549 cfun->local_decls = tree_cons (NULL_TREE, old_var,
550 cfun->local_decls);
551 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
552 && !DECL_IGNORED_P (old_var)
553 && nonlocalized_list)
554 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
555 continue;
556 }
557
558 /* Remap the variable. */
559 new_var = remap_decl (old_var, id);
560
561 /* If we didn't remap this variable, we can't mess with its
562 TREE_CHAIN. If we remapped this variable to the return slot, it's
563 already declared somewhere else, so don't declare it here. */
564
565 if (new_var == id->retvar)
566 ;
567 else if (!new_var)
568 {
569 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
570 && !DECL_IGNORED_P (old_var)
571 && nonlocalized_list)
572 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
573 }
574 else
575 {
576 gcc_assert (DECL_P (new_var));
577 TREE_CHAIN (new_var) = new_decls;
578 new_decls = new_var;
579 }
580 }
581
582 return nreverse (new_decls);
583 }
584
585 /* Copy the BLOCK to contain remapped versions of the variables
586 therein. And hook the new block into the block-tree. */
587
588 static void
589 remap_block (tree *block, copy_body_data *id)
590 {
591 tree old_block;
592 tree new_block;
593 tree fn;
594
595 /* Make the new block. */
596 old_block = *block;
597 new_block = make_node (BLOCK);
598 TREE_USED (new_block) = TREE_USED (old_block);
599 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
600 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
601 BLOCK_NONLOCALIZED_VARS (new_block)
602 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
603 *block = new_block;
604
605 /* Remap its variables. */
606 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
607 &BLOCK_NONLOCALIZED_VARS (new_block),
608 id);
609
610 fn = id->dst_fn;
611
612 if (id->transform_lang_insert_block)
613 id->transform_lang_insert_block (new_block);
614
615 /* Remember the remapped block. */
616 insert_decl_map (id, old_block, new_block);
617 }
618
619 /* Copy the whole block tree and root it in id->block. */
620 static tree
621 remap_blocks (tree block, copy_body_data *id)
622 {
623 tree t;
624 tree new_tree = block;
625
626 if (!block)
627 return NULL;
628
629 remap_block (&new_tree, id);
630 gcc_assert (new_tree != block);
631 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
632 prepend_lexical_block (new_tree, remap_blocks (t, id));
633 /* Blocks are in arbitrary order, but to make things slightly prettier, do
634 not swap their order when producing a copy. */
635 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
636 return new_tree;
637 }
638
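/* Make a copy of the statement list pointed to by *TP: allocate a new
   STATEMENT_LIST, link the original statements into it (recursively
   copying any nested statement lists), and replace *TP with the copy.  */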
639 static void
640 copy_statement_list (tree *tp)
641 {
642 tree_stmt_iterator oi, ni;
643 tree new_tree;
644
645 new_tree = alloc_stmt_list ();
646 ni = tsi_start (new_tree);
647 oi = tsi_start (*tp);
648 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
649 *tp = new_tree;
650
651 for (; !tsi_end_p (oi); tsi_next (&oi))
652 {
653 tree stmt = tsi_stmt (oi);
654 if (TREE_CODE (stmt) == STATEMENT_LIST)
655 copy_statement_list (&stmt);
656 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
657 }
658 }
659
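/* Copy the BIND_EXPR pointed to by *TP using copy_tree_r, remapping its
   BIND_EXPR_BLOCK and BIND_EXPR_VARS with the mapping information in ID.  */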
660 static void
661 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
662 {
663 tree block = BIND_EXPR_BLOCK (*tp);
664 /* Copy (and replace) the statement. */
665 copy_tree_r (tp, walk_subtrees, NULL);
666 if (block)
667 {
668 remap_block (&block, id);
669 BIND_EXPR_BLOCK (*tp) = block;
670 }
671
672 if (BIND_EXPR_VARS (*tp))
673 /* This will remap a lot of the same decls again, but this should be
674 harmless. */
675 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
676 }
677
678
679 /* Create a new gimple_seq by remapping all the statements in BODY
680 using the inlining information in ID. */
681
682 gimple_seq
683 remap_gimple_seq (gimple_seq body, copy_body_data *id)
684 {
685 gimple_stmt_iterator si;
686 gimple_seq new_body = NULL;
687
688 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
689 {
690 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
691 gimple_seq_add_stmt (&new_body, new_stmt);
692 }
693
694 return new_body;
695 }
696
697
698 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
699 block using the mapping information in ID. */
700
701 static gimple
702 copy_gimple_bind (gimple stmt, copy_body_data *id)
703 {
704 gimple new_bind;
705 tree new_block, new_vars;
706 gimple_seq body, new_body;
707
708 /* Copy the statement. Note that we purposely don't use copy_stmt
709 here because we need to remap statements as we copy. */
710 body = gimple_bind_body (stmt);
711 new_body = remap_gimple_seq (body, id);
712
713 new_block = gimple_bind_block (stmt);
714 if (new_block)
715 remap_block (&new_block, id);
716
717 /* This will remap a lot of the same decls again, but this should be
718 harmless. */
719 new_vars = gimple_bind_vars (stmt);
720 if (new_vars)
721 new_vars = remap_decls (new_vars, NULL, id);
722
723 new_bind = gimple_build_bind (new_vars, new_body, new_block);
724
725 return new_bind;
726 }
727
728
729 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
730 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
731 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
732 recursing into the child nodes of *TP. */
733
734 static tree
735 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
736 {
737 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
738 copy_body_data *id = (copy_body_data *) wi_p->info;
739 tree fn = id->src_fn;
740
741 if (TREE_CODE (*tp) == SSA_NAME)
742 {
743 *tp = remap_ssa_name (*tp, id);
744 *walk_subtrees = 0;
745 return NULL;
746 }
747 else if (auto_var_in_fn_p (*tp, fn))
748 {
749 /* Local variables and labels need to be replaced by equivalent
750 variables. We don't want to copy static variables; there's
751 only one of those, no matter how many times we inline the
752 containing function. Similarly for globals from an outer
753 function. */
754 tree new_decl;
755
756 /* Remap the declaration. */
757 new_decl = remap_decl (*tp, id);
758 gcc_assert (new_decl);
759 /* Replace this variable with the copy. */
760 STRIP_TYPE_NOPS (new_decl);
761 /* ??? The C++ frontend uses void * pointer zero to initialize
762 any other type. This confuses the middle-end type verification.
763 As cloned bodies do not go through gimplification again the fixup
764 there doesn't trigger. */
765 if (TREE_CODE (new_decl) == INTEGER_CST
766 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
767 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
768 *tp = new_decl;
769 *walk_subtrees = 0;
770 }
771 else if (TREE_CODE (*tp) == STATEMENT_LIST)
772 gcc_unreachable ();
773 else if (TREE_CODE (*tp) == SAVE_EXPR)
774 gcc_unreachable ();
775 else if (TREE_CODE (*tp) == LABEL_DECL
776 && (!DECL_CONTEXT (*tp)
777 || decl_function_context (*tp) == id->src_fn))
778 /* These may need to be remapped for EH handling. */
779 *tp = remap_decl (*tp, id);
780 else if (TYPE_P (*tp))
781 /* Types may need remapping as well. */
782 *tp = remap_type (*tp, id);
783 else if (CONSTANT_CLASS_P (*tp))
784 {
785 /* If this is a constant, we have to copy the node iff the type
786 will be remapped. copy_tree_r will not copy a constant. */
787 tree new_type = remap_type (TREE_TYPE (*tp), id);
788
789 if (new_type == TREE_TYPE (*tp))
790 *walk_subtrees = 0;
791
792 else if (TREE_CODE (*tp) == INTEGER_CST)
793 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
794 TREE_INT_CST_HIGH (*tp));
795 else
796 {
797 *tp = copy_node (*tp);
798 TREE_TYPE (*tp) = new_type;
799 }
800 }
801 else
802 {
803 /* Otherwise, just copy the node. Note that copy_tree_r already
804 knows not to copy VAR_DECLs, etc., so this is safe. */
805 if (TREE_CODE (*tp) == INDIRECT_REF)
806 {
807 /* Get rid of *& from inline substitutions that can happen when a
808 pointer argument is an ADDR_EXPR. */
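/* E.g. if the caller passes &x for a pointer parameter p, a use of *p
   in the inlined body becomes *&x here, which we want to fold back to
   plain x (an illustrative sketch).  */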
809 tree decl = TREE_OPERAND (*tp, 0);
810 tree *n;
811
812 n = (tree *) pointer_map_contains (id->decl_map, decl);
813 if (n)
814 {
815 tree type, new_tree, old;
816
817 /* If we happen to get an ADDR_EXPR in n->value, strip
818 it manually here as we'll eventually get ADDR_EXPRs
819 which lie about their types pointed to. In this case
820 build_fold_indirect_ref wouldn't strip the
821 INDIRECT_REF, but we absolutely rely on that. As
822 fold_indirect_ref does other useful transformations,
823 try that first, though. */
824 type = TREE_TYPE (TREE_TYPE (*n));
825 new_tree = unshare_expr (*n);
826 old = *tp;
827 *tp = gimple_fold_indirect_ref (new_tree);
828 if (!*tp)
829 {
830 if (TREE_CODE (new_tree) == ADDR_EXPR)
831 {
832 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
833 type, new_tree);
834 /* ??? We should either assert here or build
835 a VIEW_CONVERT_EXPR instead of blindly leaking
836 incompatible types to our IL. */
837 if (! *tp)
838 *tp = TREE_OPERAND (new_tree, 0);
839 }
840 else
841 {
842 *tp = build1 (INDIRECT_REF, type, new_tree);
843 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
844 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
845 }
846 }
847 *walk_subtrees = 0;
848 return NULL;
849 }
850 }
851
852 /* Here is the "usual case". Copy this tree node, and then
853 tweak some special cases. */
854 copy_tree_r (tp, walk_subtrees, NULL);
855
856 /* Global variables we haven't seen yet need to go into referenced
857 vars, unless they are referenced only from types. */
858 if (gimple_in_ssa_p (cfun)
859 && TREE_CODE (*tp) == VAR_DECL
860 && id->remapping_type_depth == 0
861 && !processing_debug_stmt)
862 add_referenced_var (*tp);
863
864 /* We should never have TREE_BLOCK set on non-statements. */
865 if (EXPR_P (*tp))
866 gcc_assert (!TREE_BLOCK (*tp));
867
868 if (TREE_CODE (*tp) != OMP_CLAUSE)
869 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
870
871 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
872 {
873 /* The copied TARGET_EXPR has never been expanded, even if the
874 original node was expanded already. */
875 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
876 TREE_OPERAND (*tp, 3) = NULL_TREE;
877 }
878 else if (TREE_CODE (*tp) == ADDR_EXPR)
879 {
880 /* Variable substitution need not be simple; consider, in particular,
881 the INDIRECT_REF substitution above. Make sure that
882 TREE_CONSTANT and friends are up-to-date. But make sure
883 not to improperly set TREE_BLOCK on some sub-expressions. */
884 int invariant = is_gimple_min_invariant (*tp);
885 tree block = id->block;
886 id->block = NULL_TREE;
887 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
888 id->block = block;
889
890 /* Handle the case where we substituted an INDIRECT_REF
891 into the operand of the ADDR_EXPR. */
892 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
893 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
894 else
895 recompute_tree_invariant_for_addr_expr (*tp);
896
897 /* If this used to be invariant, but is not any longer,
898 then regimplification is probably needed. */
899 if (invariant && !is_gimple_min_invariant (*tp))
900 id->regimplify = true;
901
902 *walk_subtrees = 0;
903 }
904 }
905
906 /* Keep iterating. */
907 return NULL_TREE;
908 }
909
910
911 /* Called from copy_body_id via walk_tree. DATA is really a
912 `copy_body_data *'. */
913
914 tree
915 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
916 {
917 copy_body_data *id = (copy_body_data *) data;
918 tree fn = id->src_fn;
919 tree new_block;
920
921 /* Begin by recognizing trees that we'll completely rewrite for the
922 inlining context. Our output for these trees is completely
923 different from our input (e.g. RETURN_EXPR is deleted and morphs
924 into an edge). Further down, we'll handle trees that get
925 duplicated and/or tweaked. */
926
927 /* When requested, RETURN_EXPRs should be transformed to just the
928 contained MODIFY_EXPR. The branch semantics of the return will
929 be handled elsewhere by manipulating the CFG rather than a statement. */
930 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
931 {
932 tree assignment = TREE_OPERAND (*tp, 0);
933
934 /* If we're returning something, just turn that into an
935 assignment into the equivalent of the original RESULT_DECL.
936 If the "assignment" is just the result decl, the result
937 decl has already been set (e.g. a recent "foo (&result_decl,
938 ...)"); just toss the entire RETURN_EXPR. */
939 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
940 {
941 /* Replace the RETURN_EXPR with (a copy of) the
942 MODIFY_EXPR hanging underneath. */
943 *tp = copy_node (assignment);
944 }
945 else /* Else the RETURN_EXPR returns no value. */
946 {
947 *tp = NULL;
948 return (tree) (void *)1;
949 }
950 }
951 else if (TREE_CODE (*tp) == SSA_NAME)
952 {
953 *tp = remap_ssa_name (*tp, id);
954 *walk_subtrees = 0;
955 return NULL;
956 }
957
958 /* Local variables and labels need to be replaced by equivalent
959 variables. We don't want to copy static variables; there's only
960 one of those, no matter how many times we inline the containing
961 function. Similarly for globals from an outer function. */
962 else if (auto_var_in_fn_p (*tp, fn))
963 {
964 tree new_decl;
965
966 /* Remap the declaration. */
967 new_decl = remap_decl (*tp, id);
968 gcc_assert (new_decl);
969 /* Replace this variable with the copy. */
970 STRIP_TYPE_NOPS (new_decl);
971 *tp = new_decl;
972 *walk_subtrees = 0;
973 }
974 else if (TREE_CODE (*tp) == STATEMENT_LIST)
975 copy_statement_list (tp);
976 else if (TREE_CODE (*tp) == SAVE_EXPR
977 || TREE_CODE (*tp) == TARGET_EXPR)
978 remap_save_expr (tp, id->decl_map, walk_subtrees);
979 else if (TREE_CODE (*tp) == LABEL_DECL
980 && (! DECL_CONTEXT (*tp)
981 || decl_function_context (*tp) == id->src_fn))
982 /* These may need to be remapped for EH handling. */
983 *tp = remap_decl (*tp, id);
984 else if (TREE_CODE (*tp) == BIND_EXPR)
985 copy_bind_expr (tp, walk_subtrees, id);
986 /* Types may need remapping as well. */
987 else if (TYPE_P (*tp))
988 *tp = remap_type (*tp, id);
989
990 /* If this is a constant, we have to copy the node iff the type will be
991 remapped. copy_tree_r will not copy a constant. */
992 else if (CONSTANT_CLASS_P (*tp))
993 {
994 tree new_type = remap_type (TREE_TYPE (*tp), id);
995
996 if (new_type == TREE_TYPE (*tp))
997 *walk_subtrees = 0;
998
999 else if (TREE_CODE (*tp) == INTEGER_CST)
1000 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1001 TREE_INT_CST_HIGH (*tp));
1002 else
1003 {
1004 *tp = copy_node (*tp);
1005 TREE_TYPE (*tp) = new_type;
1006 }
1007 }
1008
1009 /* Otherwise, just copy the node. Note that copy_tree_r already
1010 knows not to copy VAR_DECLs, etc., so this is safe. */
1011 else
1012 {
1013 /* Here we handle trees that are not completely rewritten.
1014 First we detect some inlining-induced bogosities for
1015 discarding. */
1016 if (TREE_CODE (*tp) == MODIFY_EXPR
1017 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1018 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1019 {
1020 /* Some assignments VAR = VAR; don't generate any rtl code
1021 and thus don't count as variable modification. Avoid
1022 keeping bogosities like 0 = 0. */
1023 tree decl = TREE_OPERAND (*tp, 0), value;
1024 tree *n;
1025
1026 n = (tree *) pointer_map_contains (id->decl_map, decl);
1027 if (n)
1028 {
1029 value = *n;
1030 STRIP_TYPE_NOPS (value);
1031 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1032 {
1033 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1034 return copy_tree_body_r (tp, walk_subtrees, data);
1035 }
1036 }
1037 }
1038 else if (TREE_CODE (*tp) == INDIRECT_REF)
1039 {
1040 /* Get rid of *& from inline substitutions that can happen when a
1041 pointer argument is an ADDR_EXPR. */
1042 tree decl = TREE_OPERAND (*tp, 0);
1043 tree *n;
1044
1045 n = (tree *) pointer_map_contains (id->decl_map, decl);
1046 if (n)
1047 {
1048 tree new_tree;
1049 tree old;
1050 /* If we happen to get an ADDR_EXPR in n->value, strip
1051 it manually here as we'll eventually get ADDR_EXPRs
1052 which lie about their types pointed to. In this case
1053 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1054 but we absolutely rely on that. As fold_indirect_ref
1055 does other useful transformations, try that first, though. */
1056 tree type = TREE_TYPE (TREE_TYPE (*n));
1057 if (id->do_not_unshare)
1058 new_tree = *n;
1059 else
1060 new_tree = unshare_expr (*n);
1061 old = *tp;
1062 *tp = gimple_fold_indirect_ref (new_tree);
1063 if (! *tp)
1064 {
1065 if (TREE_CODE (new_tree) == ADDR_EXPR)
1066 {
1067 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1068 type, new_tree);
1069 /* ??? We should either assert here or build
1070 a VIEW_CONVERT_EXPR instead of blindly leaking
1071 incompatible types to our IL. */
1072 if (! *tp)
1073 *tp = TREE_OPERAND (new_tree, 0);
1074 }
1075 else
1076 {
1077 *tp = build1 (INDIRECT_REF, type, new_tree);
1078 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1079 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1080 }
1081 }
1082 *walk_subtrees = 0;
1083 return NULL;
1084 }
1085 }
1086
1087 /* Here is the "usual case". Copy this tree node, and then
1088 tweak some special cases. */
1089 copy_tree_r (tp, walk_subtrees, NULL);
1090
1091 /* Global variables we haven't seen yet need to go into referenced
1092 vars, unless they are referenced only from types or debug stmts. */
1093 if (gimple_in_ssa_p (cfun)
1094 && TREE_CODE (*tp) == VAR_DECL
1095 && id->remapping_type_depth == 0
1096 && !processing_debug_stmt)
1097 add_referenced_var (*tp);
1098
1099 /* If EXPR has a block defined, map it to the newly constructed block.
1100 When inlining we want EXPRs without a block to appear in the block
1101 of the function call. */
1102 if (EXPR_P (*tp))
1103 {
1104 new_block = id->block;
1105 if (TREE_BLOCK (*tp))
1106 {
1107 tree *n;
1108 n = (tree *) pointer_map_contains (id->decl_map,
1109 TREE_BLOCK (*tp));
1110 gcc_assert (n);
1111 new_block = *n;
1112 }
1113 TREE_BLOCK (*tp) = new_block;
1114 }
1115
1116 if (TREE_CODE (*tp) != OMP_CLAUSE)
1117 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1118
1119 /* The copied TARGET_EXPR has never been expanded, even if the
1120 original node was expanded already. */
1121 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1122 {
1123 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1124 TREE_OPERAND (*tp, 3) = NULL_TREE;
1125 }
1126
1127 /* Variable substitution need not be simple; consider, in particular, the
1128 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1129 and friends are up-to-date. */
1130 else if (TREE_CODE (*tp) == ADDR_EXPR)
1131 {
1132 int invariant = is_gimple_min_invariant (*tp);
1133 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1134
1135 /* Handle the case where we substituted an INDIRECT_REF
1136 into the operand of the ADDR_EXPR. */
1137 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1138 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1139 else
1140 recompute_tree_invariant_for_addr_expr (*tp);
1141
1142 /* If this used to be invariant, but is not any longer,
1143 then regimplification is probably needed. */
1144 if (invariant && !is_gimple_min_invariant (*tp))
1145 id->regimplify = true;
1146
1147 *walk_subtrees = 0;
1148 }
1149 }
1150
1151 /* Keep iterating. */
1152 return NULL_TREE;
1153 }
1154
1155 /* Helper for remap_gimple_stmt. Given an EH region number for the
1156 source function, map that to the duplicate EH region number in
1157 the destination function. */
1158
1159 static int
1160 remap_eh_region_nr (int old_nr, copy_body_data *id)
1161 {
1162 eh_region old_r, new_r;
1163 void **slot;
1164
1165 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1166 slot = pointer_map_contains (id->eh_map, old_r);
1167 new_r = (eh_region) *slot;
1168
1169 return new_r->index;
1170 }
1171
1172 /* Similar, but operate on INTEGER_CSTs. */
1173
1174 static tree
1175 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1176 {
1177 int old_nr, new_nr;
1178
1179 old_nr = tree_low_cst (old_t_nr, 0);
1180 new_nr = remap_eh_region_nr (old_nr, id);
1181
1182 return build_int_cst (NULL, new_nr);
1183 }
1184
1185 /* Helper for copy_bb. Remap statement STMT using the inlining
1186 information in ID. Return the new statement copy. */
1187
1188 static gimple
1189 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1190 {
1191 gimple copy = NULL;
1192 struct walk_stmt_info wi;
1193 tree new_block;
1194 bool skip_first = false;
1195
1196 /* Begin by recognizing trees that we'll completely rewrite for the
1197 inlining context. Our output for these trees is completely
1198 different from our input (e.g. RETURN_EXPR is deleted and morphs
1199 into an edge). Further down, we'll handle trees that get
1200 duplicated and/or tweaked. */
1201
1202 /* When requested, GIMPLE_RETURNs should be transformed to just the
1203 contained GIMPLE_ASSIGN. The branch semantics of the return will
1204 be handled elsewhere by manipulating the CFG rather than the
1205 statement. */
1206 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1207 {
1208 tree retval = gimple_return_retval (stmt);
1209
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If RETVAL is just the result decl, the result decl has
1213 already been set (e.g. a recent "foo (&result_decl, ...)");
1214 just toss the entire GIMPLE_RETURN. */
1215 if (retval && TREE_CODE (retval) != RESULT_DECL)
1216 {
1217 copy = gimple_build_assign (id->retvar, retval);
1218 /* id->retvar is already substituted. Skip it on later remapping. */
1219 skip_first = true;
1220 }
1221 else
1222 return gimple_build_nop ();
1223 }
1224 else if (gimple_has_substatements (stmt))
1225 {
1226 gimple_seq s1, s2;
1227
1228 /* When cloning bodies from the C++ front end, we will be handed bodies
1229 in High GIMPLE form. Handle here all the High GIMPLE statements that
1230 have embedded statements. */
1231 switch (gimple_code (stmt))
1232 {
1233 case GIMPLE_BIND:
1234 copy = copy_gimple_bind (stmt, id);
1235 break;
1236
1237 case GIMPLE_CATCH:
1238 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1239 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1240 break;
1241
1242 case GIMPLE_EH_FILTER:
1243 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1244 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1245 break;
1246
1247 case GIMPLE_TRY:
1248 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1249 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1250 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1251 break;
1252
1253 case GIMPLE_WITH_CLEANUP_EXPR:
1254 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1255 copy = gimple_build_wce (s1);
1256 break;
1257
1258 case GIMPLE_OMP_PARALLEL:
1259 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1260 copy = gimple_build_omp_parallel
1261 (s1,
1262 gimple_omp_parallel_clauses (stmt),
1263 gimple_omp_parallel_child_fn (stmt),
1264 gimple_omp_parallel_data_arg (stmt));
1265 break;
1266
1267 case GIMPLE_OMP_TASK:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_task
1270 (s1,
1271 gimple_omp_task_clauses (stmt),
1272 gimple_omp_task_child_fn (stmt),
1273 gimple_omp_task_data_arg (stmt),
1274 gimple_omp_task_copy_fn (stmt),
1275 gimple_omp_task_arg_size (stmt),
1276 gimple_omp_task_arg_align (stmt));
1277 break;
1278
1279 case GIMPLE_OMP_FOR:
1280 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1281 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1282 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1283 gimple_omp_for_collapse (stmt), s2);
1284 {
1285 size_t i;
1286 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1287 {
1288 gimple_omp_for_set_index (copy, i,
1289 gimple_omp_for_index (stmt, i));
1290 gimple_omp_for_set_initial (copy, i,
1291 gimple_omp_for_initial (stmt, i));
1292 gimple_omp_for_set_final (copy, i,
1293 gimple_omp_for_final (stmt, i));
1294 gimple_omp_for_set_incr (copy, i,
1295 gimple_omp_for_incr (stmt, i));
1296 gimple_omp_for_set_cond (copy, i,
1297 gimple_omp_for_cond (stmt, i));
1298 }
1299 }
1300 break;
1301
1302 case GIMPLE_OMP_MASTER:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy = gimple_build_omp_master (s1);
1305 break;
1306
1307 case GIMPLE_OMP_ORDERED:
1308 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1309 copy = gimple_build_omp_ordered (s1);
1310 break;
1311
1312 case GIMPLE_OMP_SECTION:
1313 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1314 copy = gimple_build_omp_section (s1);
1315 break;
1316
1317 case GIMPLE_OMP_SECTIONS:
1318 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1319 copy = gimple_build_omp_sections
1320 (s1, gimple_omp_sections_clauses (stmt));
1321 break;
1322
1323 case GIMPLE_OMP_SINGLE:
1324 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1325 copy = gimple_build_omp_single
1326 (s1, gimple_omp_single_clauses (stmt));
1327 break;
1328
1329 case GIMPLE_OMP_CRITICAL:
1330 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1331 copy
1332 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1333 break;
1334
1335 default:
1336 gcc_unreachable ();
1337 }
1338 }
1339 else
1340 {
1341 if (gimple_assign_copy_p (stmt)
1342 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1343 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1344 {
1345 /* Here we handle statements that are not completely rewritten.
1346 First we detect some inlining-induced bogosities for
1347 discarding. */
1348
1349 /* Some assignments VAR = VAR; don't generate any rtl code
1350 and thus don't count as variable modification. Avoid
1351 keeping bogosities like 0 = 0. */
1352 tree decl = gimple_assign_lhs (stmt), value;
1353 tree *n;
1354
1355 n = (tree *) pointer_map_contains (id->decl_map, decl);
1356 if (n)
1357 {
1358 value = *n;
1359 STRIP_TYPE_NOPS (value);
1360 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1361 return gimple_build_nop ();
1362 }
1363 }
1364
1365 if (gimple_debug_bind_p (stmt))
1366 {
1367 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1368 gimple_debug_bind_get_value (stmt),
1369 stmt);
1370 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1371 return copy;
1372 }
1373
1374 /* Create a new deep copy of the statement. */
1375 copy = gimple_copy (stmt);
1376
1377 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1378 RESX and EH_DISPATCH. */
1379 if (id->eh_map)
1380 switch (gimple_code (copy))
1381 {
1382 case GIMPLE_CALL:
1383 {
1384 tree r, fndecl = gimple_call_fndecl (copy);
1385 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1386 switch (DECL_FUNCTION_CODE (fndecl))
1387 {
1388 case BUILT_IN_EH_COPY_VALUES:
1389 r = gimple_call_arg (copy, 1);
1390 r = remap_eh_region_tree_nr (r, id);
1391 gimple_call_set_arg (copy, 1, r);
1392 /* FALLTHRU */
1393
1394 case BUILT_IN_EH_POINTER:
1395 case BUILT_IN_EH_FILTER:
1396 r = gimple_call_arg (copy, 0);
1397 r = remap_eh_region_tree_nr (r, id);
1398 gimple_call_set_arg (copy, 0, r);
1399 break;
1400
1401 default:
1402 break;
1403 }
1404 }
1405 break;
1406
1407 case GIMPLE_RESX:
1408 {
1409 int r = gimple_resx_region (copy);
1410 r = remap_eh_region_nr (r, id);
1411 gimple_resx_set_region (copy, r);
1412 }
1413 break;
1414
1415 case GIMPLE_EH_DISPATCH:
1416 {
1417 int r = gimple_eh_dispatch_region (copy);
1418 r = remap_eh_region_nr (r, id);
1419 gimple_eh_dispatch_set_region (copy, r);
1420 }
1421 break;
1422
1423 default:
1424 break;
1425 }
1426 }
1427
1428 /* If STMT has a block defined, map it to the newly constructed
1429 block. When inlining we want statements without a block to
1430 appear in the block of the function call. */
1431 new_block = id->block;
1432 if (gimple_block (copy))
1433 {
1434 tree *n;
1435 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1436 gcc_assert (n);
1437 new_block = *n;
1438 }
1439
1440 gimple_set_block (copy, new_block);
1441
1442 if (gimple_debug_bind_p (copy))
1443 return copy;
1444
1445 /* Remap all the operands in COPY. */
1446 memset (&wi, 0, sizeof (wi));
1447 wi.info = id;
1448 if (skip_first)
1449 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1450 else
1451 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1452
1453 /* Clear the copied virtual operands. We are not remapping them here
1454 but are going to recreate them from scratch. */
1455 if (gimple_has_mem_ops (copy))
1456 {
1457 gimple_set_vdef (copy, NULL_TREE);
1458 gimple_set_vuse (copy, NULL_TREE);
1459 }
1460
1461 return copy;
1462 }
1463
1464
1465 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1466 later. */
1467
1468 static basic_block
1469 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1470 gcov_type count_scale)
1471 {
1472 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1473 basic_block copy_basic_block;
1474 tree decl;
1475
1476 /* create_basic_block() will append every new block to
1477 basic_block_info automatically. */
1478 copy_basic_block = create_basic_block (NULL, (void *) 0,
1479 (basic_block) bb->prev_bb->aux);
1480 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1481
1482 /* We are going to rebuild frequencies from scratch. These values
1483 are of only minor importance in driving canonicalize_loop_headers. */
1484 copy_basic_block->frequency = ((gcov_type)bb->frequency
1485 * frequency_scale / REG_BR_PROB_BASE);
1486
1487 if (copy_basic_block->frequency > BB_FREQ_MAX)
1488 copy_basic_block->frequency = BB_FREQ_MAX;
1489
1490 copy_gsi = gsi_start_bb (copy_basic_block);
1491
1492 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1493 {
1494 gimple stmt = gsi_stmt (gsi);
1495 gimple orig_stmt = stmt;
1496
1497 id->regimplify = false;
1498 stmt = remap_gimple_stmt (stmt, id);
1499 if (gimple_nop_p (stmt))
1500 continue;
1501
1502 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1503 seq_gsi = copy_gsi;
1504
1505 /* With return slot optimization we can end up with
1506 non-gimple (foo *)&this->m; fix that here. */
1507 if (is_gimple_assign (stmt)
1508 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1509 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1510 {
1511 tree new_rhs;
1512 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1513 gimple_assign_rhs1 (stmt),
1514 true, NULL, false, GSI_NEW_STMT);
1515 gimple_assign_set_rhs1 (stmt, new_rhs);
1516 id->regimplify = false;
1517 }
1518
1519 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1520
1521 if (id->regimplify)
1522 gimple_regimplify_operands (stmt, &seq_gsi);
1523
1524 /* If copy_basic_block was empty at the start of this iteration,
1525 call gsi_start_bb again to get at the newly added statements. */
1526 if (gsi_end_p (copy_gsi))
1527 copy_gsi = gsi_start_bb (copy_basic_block);
1528 else
1529 gsi_next (&copy_gsi);
1530
1531 /* Process the new statement. The call to gimple_regimplify_operands
1532 possibly turned the statement into multiple statements; we
1533 need to process all of them. */
1534 do
1535 {
1536 tree fn;
1537
1538 stmt = gsi_stmt (copy_gsi);
1539 if (is_gimple_call (stmt)
1540 && gimple_call_va_arg_pack_p (stmt)
1541 && id->gimple_call)
1542 {
1543 /* __builtin_va_arg_pack () should be replaced by
1544 all arguments corresponding to ... in the caller. */
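/* For example (a sketch, assuming a hypothetical caller): if
   f (int x, ...) contains  g (x, __builtin_va_arg_pack ());  then
   inlining the call  f (1, 2, 3)  turns that into  g (1, 2, 3).  */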
1545 tree p;
1546 gimple new_call;
1547 VEC(tree, heap) *argarray;
1548 size_t nargs = gimple_call_num_args (id->gimple_call);
1549 size_t n;
1550
1551 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1552 nargs--;
1553
1554 /* Create the new array of arguments. */
1555 n = nargs + gimple_call_num_args (stmt);
1556 argarray = VEC_alloc (tree, heap, n);
1557 VEC_safe_grow (tree, heap, argarray, n);
1558
1559 /* Copy all the arguments before '...' */
1560 memcpy (VEC_address (tree, argarray),
1561 gimple_call_arg_ptr (stmt, 0),
1562 gimple_call_num_args (stmt) * sizeof (tree));
1563
1564 /* Append the arguments passed in '...' */
1565 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1566 gimple_call_arg_ptr (id->gimple_call, 0)
1567 + (gimple_call_num_args (id->gimple_call) - nargs),
1568 nargs * sizeof (tree));
1569
1570 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1571 argarray);
1572
1573 VEC_free (tree, heap, argarray);
1574
1575 /* Copy all GIMPLE_CALL flags, location and block, except
1576 GF_CALL_VA_ARG_PACK. */
1577 gimple_call_copy_flags (new_call, stmt);
1578 gimple_call_set_va_arg_pack (new_call, false);
1579 gimple_set_location (new_call, gimple_location (stmt));
1580 gimple_set_block (new_call, gimple_block (stmt));
1581 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1582
1583 gsi_replace (&copy_gsi, new_call, false);
1584 gimple_set_bb (stmt, NULL);
1585 stmt = new_call;
1586 }
1587 else if (is_gimple_call (stmt)
1588 && id->gimple_call
1589 && (decl = gimple_call_fndecl (stmt))
1590 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1591 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1592 {
1593 /* __builtin_va_arg_pack_len () should be replaced by
1594 the number of anonymous arguments. */
1595 size_t nargs = gimple_call_num_args (id->gimple_call);
1596 tree count, p;
1597 gimple new_stmt;
1598
1599 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1600 nargs--;
1601
1602 count = build_int_cst (integer_type_node, nargs);
1603 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1604 gsi_replace (&copy_gsi, new_stmt, false);
1605 stmt = new_stmt;
1606 }
1607
1608 /* Statements produced by inlining can be unfolded, especially
1609 when we constant propagated some operands. We can't fold
1610 them right now for two reasons:
1611 1) folding requires SSA_NAME_DEF_STMTs to be correct
1612 2) we can't change function calls to builtins.
1613 So we just mark the statement for later folding. We mark
1614 all new statements, instead of just the statements that have changed
1615 by some nontrivial substitution, so that even statements made
1616 foldable indirectly are updated. If this turns out to be
1617 expensive, copy_body can be told to watch for nontrivial
1618 changes. */
1619 if (id->statements_to_fold)
1620 pointer_set_insert (id->statements_to_fold, stmt);
1621
1622 /* We're duplicating a CALL_EXPR. Find any corresponding
1623 callgraph edges and update or duplicate them. */
1624 if (is_gimple_call (stmt))
1625 {
1626 struct cgraph_edge *edge;
1627 int flags;
1628
1629 switch (id->transform_call_graph_edges)
1630 {
1631 case CB_CGE_DUPLICATE:
1632 edge = cgraph_edge (id->src_node, orig_stmt);
1633 if (edge)
1634 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1635 gimple_uid (stmt),
1636 REG_BR_PROB_BASE, 1,
1637 edge->frequency, true);
1638 break;
1639
1640 case CB_CGE_MOVE_CLONES:
1641 cgraph_set_call_stmt_including_clones (id->dst_node,
1642 orig_stmt, stmt);
1643 edge = cgraph_edge (id->dst_node, stmt);
1644 break;
1645
1646 case CB_CGE_MOVE:
1647 edge = cgraph_edge (id->dst_node, orig_stmt);
1648 if (edge)
1649 cgraph_set_call_stmt (edge, stmt);
1650 break;
1651
1652 default:
1653 gcc_unreachable ();
1654 }
1655
1656 /* Constant propagation on arguments done during inlining
1657 may create a new direct call. Produce an edge for it. */
1658 if ((!edge
1659 || (edge->indirect_call
1660 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1661 && is_gimple_call (stmt)
1662 && (fn = gimple_call_fndecl (stmt)) != NULL)
1663 {
1664 struct cgraph_node *dest = cgraph_node (fn);
1665
1666 /* We have a missing edge in the callgraph. This can happen
1667 when previous inlining turned an indirect call into a
1668 direct call by constant propagating arguments. In all
1669 other cases we hit a bug (incorrect node sharing is the
1670 most common reason for missing edges). */
1671 gcc_assert (dest->needed || !dest->analyzed);
1672 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1673 cgraph_create_edge_including_clones
1674 (id->dst_node, dest, stmt, bb->count,
1675 compute_call_stmt_bb_frequency (id->dst_node->decl, bb),
1676 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1677 else
1678 cgraph_create_edge (id->dst_node, dest, stmt,
1679 bb->count, CGRAPH_FREQ_BASE,
1680 bb->loop_depth)->inline_failed
1681 = CIF_ORIGINALLY_INDIRECT_CALL;
1682 if (dump_file)
1683 {
1684 fprintf (dump_file, "Created new direct edge to %s",
1685 cgraph_node_name (dest));
1686 }
1687 }
1688
1689 flags = gimple_call_flags (stmt);
1690 if (flags & ECF_MAY_BE_ALLOCA)
1691 cfun->calls_alloca = true;
1692 if (flags & ECF_RETURNS_TWICE)
1693 cfun->calls_setjmp = true;
1694 }
1695
1696 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1697 id->eh_map, id->eh_lp_nr);
1698
1699 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1700 {
1701 ssa_op_iter i;
1702 tree def;
1703
1704 find_new_referenced_vars (gsi_stmt (copy_gsi));
1705 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1706 if (TREE_CODE (def) == SSA_NAME)
1707 SSA_NAME_DEF_STMT (def) = stmt;
1708 }
1709
1710 gsi_next (&copy_gsi);
1711 }
1712 while (!gsi_end_p (copy_gsi));
1713
1714 copy_gsi = gsi_last_bb (copy_basic_block);
1715 }
1716
1717 return copy_basic_block;
1718 }
1719
1720 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1721 form is quite easy, since the dominator relationship for the old basic blocks
1722 does not change.
1723
1724 There is, however, an exception where inlining might change the dominator
1725 relation across EH edges from basic blocks within inlined functions going
1726 to landing pads in the function we inline into.
1727
1728 The function fills in PHI_RESULTs of such PHI nodes if they refer
1729 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1730 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1731 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1732 set, and this means that there will be no overlapping live ranges
1733 for the underlying symbol.
1734
1735 This might change in the future if we allow redirecting of EH edges and
1736 we might want to change the way we build the CFG pre-inlining to include
1737 all the possible edges then. */
1738 static void
1739 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1740 bool can_throw, bool nonlocal_goto)
1741 {
1742 edge e;
1743 edge_iterator ei;
1744
1745 FOR_EACH_EDGE (e, ei, bb->succs)
1746 if (!e->dest->aux
1747 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1748 {
1749 gimple phi;
1750 gimple_stmt_iterator si;
1751
1752 if (!nonlocal_goto)
1753 gcc_assert (e->flags & EDGE_EH);
1754
1755 if (!can_throw)
1756 gcc_assert (!(e->flags & EDGE_EH));
1757
1758 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1759 {
1760 edge re;
1761
1762 phi = gsi_stmt (si);
1763
1764 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1765 gcc_assert (!e->dest->aux);
1766
1767 gcc_assert ((e->flags & EDGE_EH)
1768 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1769
1770 if (!is_gimple_reg (PHI_RESULT (phi)))
1771 {
1772 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1773 continue;
1774 }
1775
1776 re = find_edge (ret_bb, e->dest);
1777 gcc_assert (re);
1778 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1779 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1780
1781 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1782 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1783 }
1784 }
1785 }
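
/* Illustrative sketch of the above (names are made up): suppose a landing
   pad LP in the caller has the PHI node

     x_3 = PHI <x_1 (E1)>

   where E1 is the existing edge from RET_BB, the block that now holds the
   code following the call.  After inlining, a throwing statement in a
   copied block gains a new EH edge E2 to LP.  For a gimple register
   result, the loop above copies the argument LP already receives on the
   edge from RET_BB, giving

     x_3 = PHI <x_1 (E1), x_1 (E2)>

   For non-register results it only marks the underlying symbol for
   renaming and lets the SSA updater recompute the arguments.  */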
1786
1787
1788 /* Copy edges from BB into its copy constructed earlier, scale profile
1789 accordingly. Edges will be taken care of later. Assume aux
1790 pointers point to the copies of each BB. */
1791
1792 static void
1793 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1794 {
1795 basic_block new_bb = (basic_block) bb->aux;
1796 edge_iterator ei;
1797 edge old_edge;
1798 gimple_stmt_iterator si;
1799 int flags;
1800
1801 /* Use the indices from the original blocks to create edges for the
1802 new ones. */
1803 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1804 if (!(old_edge->flags & EDGE_EH))
1805 {
1806 edge new_edge;
1807
1808 flags = old_edge->flags;
1809
1810 /* Return edges do get a FALLTHRU flag when they get inlined. */
1811 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1812 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1813 flags |= EDGE_FALLTHRU;
1814 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1815 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1816 new_edge->probability = old_edge->probability;
1817 }
1818
1819 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1820 return;
1821
1822 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1823 {
1824 gimple copy_stmt;
1825 bool can_throw, nonlocal_goto;
1826
1827 copy_stmt = gsi_stmt (si);
1828 if (!is_gimple_debug (copy_stmt))
1829 {
1830 update_stmt (copy_stmt);
1831 if (gimple_in_ssa_p (cfun))
1832 mark_symbols_for_renaming (copy_stmt);
1833 }
1834
1835 /* Do this before the possible split_block. */
1836 gsi_next (&si);
1837
1838 /* If this tree could throw an exception, there are two
1839 cases where we need to add abnormal edge(s): the
1840 tree wasn't in a region and there is a "current
1841 region" in the caller; or the original tree had
1842 EH edges. In both cases split the block after the tree,
1843 and add abnormal edge(s) as needed; we need both
1844 those from the callee and the caller.
1845 We check whether the copy can throw, because the const
1846 propagation can change an INDIRECT_REF which throws
1847 into a COMPONENT_REF which doesn't. If the copy
1848 can throw, the original could also throw. */
1849 can_throw = stmt_can_throw_internal (copy_stmt);
1850 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1851
1852 if (can_throw || nonlocal_goto)
1853 {
1854 if (!gsi_end_p (si))
1855 /* Note that bb's predecessor edges aren't necessarily
1856 right at this point; split_block doesn't care. */
1857 {
1858 edge e = split_block (new_bb, copy_stmt);
1859
1860 new_bb = e->dest;
1861 new_bb->aux = e->src->aux;
1862 si = gsi_start_bb (new_bb);
1863 }
1864 }
1865
1866 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1867 make_eh_dispatch_edges (copy_stmt);
1868 else if (can_throw)
1869 make_eh_edges (copy_stmt);
1870
1871 if (nonlocal_goto)
1872 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1873
1874 if ((can_throw || nonlocal_goto)
1875 && gimple_in_ssa_p (cfun))
1876 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1877 can_throw, nonlocal_goto);
1878 }
1879 }
1880
1881 /* Copy the PHIs. All blocks and edges are copied, some blocks
1882 may have been split and new outgoing EH edges inserted.
1883 BB points to the block of the original function and AUX pointers link
1884 the original and newly copied blocks. */
1885
1886 static void
1887 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1888 {
1889 basic_block const new_bb = (basic_block) bb->aux;
1890 edge_iterator ei;
1891 gimple phi;
1892 gimple_stmt_iterator si;
1893
1894 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1895 {
1896 tree res, new_res;
1897 gimple new_phi;
1898 edge new_edge;
1899
1900 phi = gsi_stmt (si);
1901 res = PHI_RESULT (phi);
1902 new_res = res;
1903 if (is_gimple_reg (res))
1904 {
1905 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1906 SSA_NAME_DEF_STMT (new_res)
1907 = new_phi = create_phi_node (new_res, new_bb);
1908 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1909 {
1910 edge const old_edge
1911 = find_edge ((basic_block) new_edge->src->aux, bb);
1912 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1913 tree new_arg = arg;
1914 tree block = id->block;
1915 id->block = NULL_TREE;
1916 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1917 id->block = block;
1918 gcc_assert (new_arg);
1919 /* With return slot optimization we can end up with
1920 non-gimple (foo *)&this->m, fix that here. */
1921 if (TREE_CODE (new_arg) != SSA_NAME
1922 && TREE_CODE (new_arg) != FUNCTION_DECL
1923 && !is_gimple_val (new_arg))
1924 {
1925 gimple_seq stmts = NULL;
1926 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1927 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1928 }
1929 add_phi_arg (new_phi, new_arg, new_edge,
1930 gimple_phi_arg_location_from_edge (phi, old_edge));
1931 }
1932 }
1933 }
1934 }
1935
1936
1937 /* Wrapper for remap_decl so it can be used as a callback. */
1938
1939 static tree
1940 remap_decl_1 (tree decl, void *data)
1941 {
1942 return remap_decl (decl, (copy_body_data *) data);
1943 }
1944
1945 /* Build struct function and associated data structures for the new clone
1946 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1947
1948 static void
1949 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1950 int frequency)
1951 {
1952 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1953 gcov_type count_scale, frequency_scale;
1954
1955 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1956 count_scale = (REG_BR_PROB_BASE * count
1957 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1958 else
1959 count_scale = 1;
1960
1961 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1962 frequency_scale = (REG_BR_PROB_BASE * frequency
1963 /
1964 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1965 else
1966 frequency_scale = count_scale;
1967
1968 /* Register specific tree functions. */
1969 gimple_register_cfg_hooks ();
1970
1971 /* Get clean struct function. */
1972 push_struct_function (new_fndecl);
1973
1974 /* We will rebuild these, so just sanity check that they are empty. */
1975 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1976 gcc_assert (cfun->local_decls == NULL);
1977 gcc_assert (cfun->cfg == NULL);
1978 gcc_assert (cfun->decl == new_fndecl);
1979
1980 /* Copy items we preserve during cloning. */
1981 cfun->static_chain_decl = src_cfun->static_chain_decl;
1982 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1983 cfun->function_end_locus = src_cfun->function_end_locus;
1984 cfun->curr_properties = src_cfun->curr_properties;
1985 cfun->last_verified = src_cfun->last_verified;
1986 if (src_cfun->ipa_transforms_to_apply)
1987 cfun->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
1988 src_cfun->ipa_transforms_to_apply);
1989 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
1990 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
1991 cfun->function_frequency = src_cfun->function_frequency;
1992 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
1993 cfun->stdarg = src_cfun->stdarg;
1994 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
1995 cfun->after_inlining = src_cfun->after_inlining;
1996 cfun->returns_struct = src_cfun->returns_struct;
1997 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
1998 cfun->after_tree_profile = src_cfun->after_tree_profile;
1999
2000 init_empty_tree_cfg ();
2001
2002 ENTRY_BLOCK_PTR->count =
2003 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2004 REG_BR_PROB_BASE);
2005 ENTRY_BLOCK_PTR->frequency =
2006 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
2007 frequency_scale / REG_BR_PROB_BASE);
2008 EXIT_BLOCK_PTR->count =
2009 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2010 REG_BR_PROB_BASE);
2011 EXIT_BLOCK_PTR->frequency =
2012 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
2013 frequency_scale / REG_BR_PROB_BASE);
2014 if (src_cfun->eh)
2015 init_eh_for_function ();
2016
2017 if (src_cfun->gimple_df)
2018 {
2019 init_tree_ssa (cfun);
2020 cfun->gimple_df->in_ssa_p = true;
2021 init_ssa_operands ();
2022 }
2023 pop_cfun ();
2024 }
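
/* Worked example of the scaling above (the values are hypothetical):
   with REG_BR_PROB_BASE == 10000, a source entry count of 1000 and a
   requested COUNT of 250, count_scale is 10000 * 250 / 1000 == 2500,
   so the clone's entry block gets 1000 * 2500 / 10000 == 250 as
   expected, and every other copied count is scaled by the same
   factor.  */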
2025
2026 /* Make a copy of the body of FN so that it can be inserted inline in
2027 another function. Walks FN via CFG, returns new fndecl. */
2028
2029 static tree
2030 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
2031 basic_block entry_block_map, basic_block exit_block_map)
2032 {
2033 tree callee_fndecl = id->src_fn;
2034 /* Original cfun for the callee, doesn't change. */
2035 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2036 struct function *cfun_to_copy;
2037 basic_block bb;
2038 tree new_fndecl = NULL;
2039 gcov_type count_scale, frequency_scale;
2040 int last;
2041
2042 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2043 count_scale = (REG_BR_PROB_BASE * count
2044 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2045 else
2046 count_scale = 1;
2047
2048 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
2049 frequency_scale = (REG_BR_PROB_BASE * frequency
2050 /
2051 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
2052 else
2053 frequency_scale = count_scale;
2054
2055 /* Register specific tree functions. */
2056 gimple_register_cfg_hooks ();
2057
2058 /* Must have a CFG here at this point. */
2059 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2060 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2061
2062 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2063
2064 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2065 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2066 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2067 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2068
2069 /* Duplicate any exception-handling regions. */
2070 if (cfun->eh)
2071 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2072 remap_decl_1, id);
2073
2074 /* Use aux pointers to map the original blocks to their copies. */
2075 FOR_EACH_BB_FN (bb, cfun_to_copy)
2076 {
2077 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2078 bb->aux = new_bb;
2079 new_bb->aux = bb;
2080 }
2081
2082 last = last_basic_block;
2083
2084 /* Now that we've duplicated the blocks, duplicate their edges. */
2085 FOR_ALL_BB_FN (bb, cfun_to_copy)
2086 copy_edges_for_bb (bb, count_scale, exit_block_map);
2087
2088 if (gimple_in_ssa_p (cfun))
2089 FOR_ALL_BB_FN (bb, cfun_to_copy)
2090 copy_phis_for_bb (bb, id);
2091
2092 FOR_ALL_BB_FN (bb, cfun_to_copy)
2093 {
2094 ((basic_block)bb->aux)->aux = NULL;
2095 bb->aux = NULL;
2096 }
2097
2098 /* Zero out the AUX fields of blocks newly created during EH edge
2099 insertion. */
2100 for (; last < last_basic_block; last++)
2101 BASIC_BLOCK (last)->aux = NULL;
2102 entry_block_map->aux = NULL;
2103 exit_block_map->aux = NULL;
2104
2105 if (id->eh_map)
2106 {
2107 pointer_map_destroy (id->eh_map);
2108 id->eh_map = NULL;
2109 }
2110
2111 return new_fndecl;
2112 }
2113
2114 /* Copy the debug STMT using ID. We deal with these statements in a
2115 special way: if any variable in their VALUE expression wasn't
2116 remapped yet, we won't remap it, because that would get decl uids
2117 out of sync, causing codegen differences between -g and -g0. If
2118 this arises, we drop the VALUE expression altogether. */
2119
2120 static void
2121 copy_debug_stmt (gimple stmt, copy_body_data *id)
2122 {
2123 tree t, *n;
2124 struct walk_stmt_info wi;
2125
2126 t = id->block;
2127 if (gimple_block (stmt))
2128 {
2129 tree *n;
2130 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2131 if (n)
2132 t = *n;
2133 }
2134 gimple_set_block (stmt, t);
2135
2136 /* Remap all the operands in COPY. */
2137 memset (&wi, 0, sizeof (wi));
2138 wi.info = id;
2139
2140 processing_debug_stmt = 1;
2141
2142 t = gimple_debug_bind_get_var (stmt);
2143
2144 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2145 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2146 {
2147 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2148 t = *n;
2149 }
2150 else
2151 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2152
2153 gimple_debug_bind_set_var (stmt, t);
2154
2155 if (gimple_debug_bind_has_value_p (stmt))
2156 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2157 remap_gimple_op_r, &wi, NULL);
2158
2159 /* Punt if any decl couldn't be remapped. */
2160 if (processing_debug_stmt < 0)
2161 gimple_debug_bind_reset_value (stmt);
2162
2163 processing_debug_stmt = 0;
2164
2165 update_stmt (stmt);
2166 if (gimple_in_ssa_p (cfun))
2167 mark_symbols_for_renaming (stmt);
2168 }
2169
2170 /* Process deferred debug stmts. In order to give values better odds
2171 of being successfully remapped, we delay the processing of debug
2172 stmts until all other stmts that might require remapping are
2173 processed. */
2174
2175 static void
2176 copy_debug_stmts (copy_body_data *id)
2177 {
2178 size_t i;
2179 gimple stmt;
2180
2181 if (!id->debug_stmts)
2182 return;
2183
2184 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2185 copy_debug_stmt (stmt, id);
2186
2187 VEC_free (gimple, heap, id->debug_stmts);
2188 }
2189
2190 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2191 another function. */
2192
2193 static tree
2194 copy_tree_body (copy_body_data *id)
2195 {
2196 tree fndecl = id->src_fn;
2197 tree body = DECL_SAVED_TREE (fndecl);
2198
2199 walk_tree (&body, copy_tree_body_r, id, NULL);
2200
2201 return body;
2202 }
2203
2204 /* Make a copy of the body of FN so that it can be inserted inline in
2205 another function. */
2206
2207 static tree
2208 copy_body (copy_body_data *id, gcov_type count, int frequency,
2209 basic_block entry_block_map, basic_block exit_block_map)
2210 {
2211 tree fndecl = id->src_fn;
2212 tree body;
2213
2214 /* If this body has a CFG, walk CFG and copy. */
2215 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2216 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
2217 copy_debug_stmts (id);
2218
2219 return body;
2220 }
2221
2222 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2223 defined in function FN, or of a data member thereof. */
2224
2225 static bool
2226 self_inlining_addr_expr (tree value, tree fn)
2227 {
2228 tree var;
2229
2230 if (TREE_CODE (value) != ADDR_EXPR)
2231 return false;
2232
2233 var = get_base_address (TREE_OPERAND (value, 0));
2234
2235 return var && auto_var_in_fn_p (var, fn);
2236 }
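
/* For instance (a sketch, with FN the function being inlined):
   self_inlining_addr_expr returns true for &local and &local.field
   when `local' is an automatic variable of FN, and false for the
   address of a global or of a variable of the calling function.  */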
2237
2238 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2239 lexical block and line number information from base_stmt, if given,
2240 or from the last stmt of the block otherwise. */
2241
2242 static gimple
2243 insert_init_debug_bind (copy_body_data *id,
2244 basic_block bb, tree var, tree value,
2245 gimple base_stmt)
2246 {
2247 gimple note;
2248 gimple_stmt_iterator gsi;
2249 tree tracked_var;
2250
2251 if (!gimple_in_ssa_p (id->src_cfun))
2252 return NULL;
2253
2254 if (!MAY_HAVE_DEBUG_STMTS)
2255 return NULL;
2256
2257 tracked_var = target_for_debug_bind (var);
2258 if (!tracked_var)
2259 return NULL;
2260
2261 if (bb)
2262 {
2263 gsi = gsi_last_bb (bb);
2264 if (!base_stmt && !gsi_end_p (gsi))
2265 base_stmt = gsi_stmt (gsi);
2266 }
2267
2268 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2269
2270 if (bb)
2271 {
2272 if (!gsi_end_p (gsi))
2273 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2274 else
2275 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2276 }
2277
2278 return note;
2279 }
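
/* A sketch of the effect: for a parameter `x' initialized from the
   constant 3, the annotation emitted above appears in GIMPLE dumps
   roughly as

     # DEBUG x => 3

   so debug information for the inlined parameter survives even when no
   real initialization statement is needed.  */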
2280
2281 static void
2282 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2283 {
2284 /* If VAR represents a zero-sized variable, it's possible that the
2285 assignment statement may result in no gimple statements. */
2286 if (init_stmt)
2287 {
2288 gimple_stmt_iterator si = gsi_last_bb (bb);
2289
2290 /* We can end up with init statements that store to a non-register
2291 from a rhs with a conversion. Handle that here by forcing the
2292 rhs into a temporary. gimple_regimplify_operands is not
2293 prepared to do this for us. */
2294 if (!is_gimple_debug (init_stmt)
2295 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2296 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2297 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2298 {
2299 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2300 gimple_expr_type (init_stmt),
2301 gimple_assign_rhs1 (init_stmt));
2302 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2303 GSI_NEW_STMT);
2304 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2305 gimple_assign_set_rhs1 (init_stmt, rhs);
2306 }
2307 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2308 gimple_regimplify_operands (init_stmt, &si);
2309 mark_symbols_for_renaming (init_stmt);
2310
2311 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2312 {
2313 tree var, def = gimple_assign_lhs (init_stmt);
2314
2315 if (TREE_CODE (def) == SSA_NAME)
2316 var = SSA_NAME_VAR (def);
2317 else
2318 var = def;
2319
2320 insert_init_debug_bind (id, bb, var, def, init_stmt);
2321 }
2322 }
2323 }
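
/* A sketch of the regimplification above (names are made up): when the
   copied parameter lives in memory, an init statement such as

     p.3 = (float) i_2;

   is not a valid GIMPLE store, so the unary RHS is forced into a
   temporary first, yielding roughly

     D.42 = (float) i_2;
     p.3 = D.42;  */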
2324
2325 /* Initialize parameter P with VALUE. If needed, produce the init statement
2326 at the end of BB. When BB is NULL, we return the init statement to be
2327 output later. */
2328 static gimple
2329 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2330 basic_block bb, tree *vars)
2331 {
2332 gimple init_stmt = NULL;
2333 tree var;
2334 tree rhs = value;
2335 tree def = (gimple_in_ssa_p (cfun)
2336 ? gimple_default_def (id->src_cfun, p) : NULL);
2337
2338 if (value
2339 && value != error_mark_node
2340 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2341 {
2342 if (fold_convertible_p (TREE_TYPE (p), value))
2343 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2344 else
2345 /* ??? For valid (GIMPLE) programs we should not end up here.
2346 Still if something has gone wrong and we end up with truly
2347 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2348 to not leak invalid GIMPLE to the following passes. */
2349 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2350 }
2351
2352 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2353 here since the type of this decl must be visible to the calling
2354 function. */
2355 var = copy_decl_to_var (p, id);
2356
2357 /* We're actually using the newly-created var. */
2358 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2359 {
2360 get_var_ann (var);
2361 add_referenced_var (var);
2362 }
2363
2364 /* Declare this new variable. */
2365 TREE_CHAIN (var) = *vars;
2366 *vars = var;
2367
2368 /* Make gimplifier happy about this variable. */
2369 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2370
2371 /* If the parameter is never assigned to and has no SSA_NAMEs created,
2372 we would not need to create a new variable here at all, if it
2373 weren't for debug info. Still, we can just use the argument
2374 value. */
2375 if (TREE_READONLY (p)
2376 && !TREE_ADDRESSABLE (p)
2377 && value && !TREE_SIDE_EFFECTS (value)
2378 && !def)
2379 {
2380 /* We may produce non-gimple trees by adding NOPs or introduce
2381 invalid sharing when the operand is not really constant.
2382 It is not a big deal to prohibit constant propagation here as
2383 we will constant propagate in the DOM1 pass anyway. */
2384 if (is_gimple_min_invariant (value)
2385 && useless_type_conversion_p (TREE_TYPE (p),
2386 TREE_TYPE (value))
2387 /* We have to be very careful about ADDR_EXPR. Make sure
2388 the base variable isn't a local variable of the inlined
2389 function, e.g., when doing recursive inlining, direct or
2390 mutually-recursive or whatever, which is why we don't
2391 just test whether fn == current_function_decl. */
2392 && ! self_inlining_addr_expr (value, fn))
2393 {
2394 insert_decl_map (id, p, value);
2395 insert_debug_decl_map (id, p, var);
2396 return insert_init_debug_bind (id, bb, var, value, NULL);
2397 }
2398 }
2399
2400 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2401 that way, when the PARM_DECL is encountered, it will be
2402 automatically replaced by the VAR_DECL. */
2403 insert_decl_map (id, p, var);
2404
2405 /* Even if P was TREE_READONLY, the new VAR should not be.
2406 In the original code, we would have constructed a
2407 temporary, and then the function body would have never
2408 changed the value of P. However, now, we will be
2409 constructing VAR directly. The constructor body may
2410 change its value multiple times as it is being
2411 constructed. Therefore, it must not be TREE_READONLY;
2412 the back-end assumes that TREE_READONLY variable is
2413 assigned to only once. */
2414 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2415 TREE_READONLY (var) = 0;
2416
2417 /* If there is no setup required and we are in SSA, take the easy route
2418 replacing all SSA names representing the function parameter by the
2419 SSA name passed to the function.
2420 
2421 We need to construct a map for the variable anyway, as it might be used
2422 in different SSA names when the parameter is set in the function.
2423 
2424 Do the replacement at -O0 for const arguments replaced by a constant.
2425 This is important for builtin_constant_p and other constructs requiring
2426 a constant argument to be visible in the inlined function body. */
2427 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2428 && (optimize
2429 || (TREE_READONLY (p)
2430 && is_gimple_min_invariant (rhs)))
2431 && (TREE_CODE (rhs) == SSA_NAME
2432 || is_gimple_min_invariant (rhs))
2433 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2434 {
2435 insert_decl_map (id, def, rhs);
2436 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2437 }
2438
2439 /* If the value of the argument is never used, don't bother initializing
2440 it. */
2441 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2442 {
2443 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2444 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2445 }
2446
2447 /* Initialize this VAR_DECL from the equivalent argument. Convert
2448 the argument to the proper type in case it was promoted. */
2449 if (value)
2450 {
2451 if (rhs == error_mark_node)
2452 {
2453 insert_decl_map (id, p, var);
2454 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2455 }
2456
2457 STRIP_USELESS_TYPE_CONVERSION (rhs);
2458
2459 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2460 keep our trees in gimple form. */
2461 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2462 {
2463 def = remap_ssa_name (def, id);
2464 init_stmt = gimple_build_assign (def, rhs);
2465 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2466 set_default_def (var, NULL);
2467 }
2468 else
2469 init_stmt = gimple_build_assign (var, rhs);
2470
2471 if (bb && init_stmt)
2472 insert_init_stmt (id, bb, init_stmt);
2473 }
2474 return init_stmt;
2475 }
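
/* A sketch of the common cases handled above, for a call such as
   `use (add1 (3))' where `add1' is `static int add1 (int x) { return x + 1; }':

   - In SSA form, when `x' has a default definition x_1(D) and 3 is a
     gimple invariant, x_1(D) is simply mapped to 3 and no statement is
     emitted; at most a debug bind is generated.

   - Otherwise a VAR_DECL copy of `x' is created and an initialization
     `x = 3;' is appended to BB (or returned to the caller when BB is
     NULL).  */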
2476
2477 /* Generate code to initialize the parameters of the function at the
2478 top of the stack in ID from the GIMPLE_CALL STMT. */
2479
2480 static void
2481 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2482 tree fn, basic_block bb)
2483 {
2484 tree parms;
2485 size_t i;
2486 tree p;
2487 tree vars = NULL_TREE;
2488 tree static_chain = gimple_call_chain (stmt);
2489
2490 /* Figure out what the parameters are. */
2491 parms = DECL_ARGUMENTS (fn);
2492
2493 /* Loop through the parameter declarations, replacing each with an
2494 equivalent VAR_DECL, appropriately initialized. */
2495 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2496 {
2497 tree val;
2498 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2499 setup_one_parameter (id, p, val, fn, bb, &vars);
2500 }
2501
2502 /* Initialize the static chain. */
2503 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2504 gcc_assert (fn != current_function_decl);
2505 if (p)
2506 {
2507 /* No static chain? Seems like a bug in tree-nested.c. */
2508 gcc_assert (static_chain);
2509
2510 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2511 }
2512
2513 declare_inline_vars (id->block, vars);
2514 }
2515
2516
2517 /* Declare a return variable to replace the RESULT_DECL for the
2518 function we are calling. An appropriate DECL_STMT is returned.
2519 The USE_STMT is filled to contain a use of the declaration to
2520 indicate the return value of the function.
2521
2522 RETURN_SLOT, if non-null, is the place in which to store the result. It
2523 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2524 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2525
2526 The return value is a (possibly null) value that is the result of the
2527 function as seen by the callee. *USE_P is a (possibly null) value that
2528 holds the result as seen by the caller. */
2529
2530 static tree
2531 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2532 tree *use_p)
2533 {
2534 tree callee = id->src_fn;
2535 tree caller = id->dst_fn;
2536 tree result = DECL_RESULT (callee);
2537 tree callee_type = TREE_TYPE (result);
2538 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2539 tree var, use;
2540
2541 /* We don't need to do anything for functions that don't return
2542 anything. */
2543 if (!result || VOID_TYPE_P (callee_type))
2544 {
2545 *use_p = NULL_TREE;
2546 return NULL_TREE;
2547 }
2548
2549 /* If there was a return slot, then the return value is the
2550 dereferenced address of that object. */
2551 if (return_slot)
2552 {
2553 /* The front end shouldn't have used both return_slot and
2554 a modify expression. */
2555 gcc_assert (!modify_dest);
2556 if (DECL_BY_REFERENCE (result))
2557 {
2558 tree return_slot_addr = build_fold_addr_expr (return_slot);
2559 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2560
2561 /* We are going to construct *&return_slot and we can't do that
2562 for variables not believed to be addressable.
2563 
2564 FIXME: This check can possibly trigger, because values returned
2565 via the return slot optimization are not believed to have their
2566 address taken by alias analysis. */
2567 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2568 if (gimple_in_ssa_p (cfun))
2569 {
2570 HOST_WIDE_INT bitsize;
2571 HOST_WIDE_INT bitpos;
2572 tree offset;
2573 enum machine_mode mode;
2574 int unsignedp;
2575 int volatilep;
2576 tree base;
2577 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2578 &offset,
2579 &mode, &unsignedp, &volatilep,
2580 false);
2581 if (TREE_CODE (base) == INDIRECT_REF)
2582 base = TREE_OPERAND (base, 0);
2583 if (TREE_CODE (base) == SSA_NAME)
2584 base = SSA_NAME_VAR (base);
2585 mark_sym_for_renaming (base);
2586 }
2587 var = return_slot_addr;
2588 }
2589 else
2590 {
2591 var = return_slot;
2592 gcc_assert (TREE_CODE (var) != SSA_NAME);
2593 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2594 }
2595 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2596 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2597 && !DECL_GIMPLE_REG_P (result)
2598 && DECL_P (var))
2599 DECL_GIMPLE_REG_P (var) = 0;
2600 use = NULL;
2601 goto done;
2602 }
2603
2604 /* All types requiring non-trivial constructors should have been handled. */
2605 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2606
2607 /* Attempt to avoid creating a new temporary variable. */
2608 if (modify_dest
2609 && TREE_CODE (modify_dest) != SSA_NAME)
2610 {
2611 bool use_it = false;
2612
2613 /* We can't use MODIFY_DEST if there's type promotion involved. */
2614 if (!useless_type_conversion_p (callee_type, caller_type))
2615 use_it = false;
2616
2617 /* ??? If we're assigning to a variable sized type, then we must
2618 reuse the destination variable, because we've no good way to
2619 create variable sized temporaries at this point. */
2620 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2621 use_it = true;
2622
2623 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2624 reuse it as the result of the call directly. Don't do this if
2625 it would promote MODIFY_DEST to addressable. */
2626 else if (TREE_ADDRESSABLE (result))
2627 use_it = false;
2628 else
2629 {
2630 tree base_m = get_base_address (modify_dest);
2631
2632 /* If the base isn't a decl, then it's a pointer, and we don't
2633 know where that's going to go. */
2634 if (!DECL_P (base_m))
2635 use_it = false;
2636 else if (is_global_var (base_m))
2637 use_it = false;
2638 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2639 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2640 && !DECL_GIMPLE_REG_P (result)
2641 && DECL_GIMPLE_REG_P (base_m))
2642 use_it = false;
2643 else if (!TREE_ADDRESSABLE (base_m))
2644 use_it = true;
2645 }
2646
2647 if (use_it)
2648 {
2649 var = modify_dest;
2650 use = NULL;
2651 goto done;
2652 }
2653 }
2654
2655 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2656
2657 var = copy_result_decl_to_var (result, id);
2658 if (gimple_in_ssa_p (cfun))
2659 {
2660 get_var_ann (var);
2661 add_referenced_var (var);
2662 }
2663
2664 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2665 DECL_STRUCT_FUNCTION (caller)->local_decls
2666 = tree_cons (NULL_TREE, var,
2667 DECL_STRUCT_FUNCTION (caller)->local_decls);
2668
2669 /* Do not have the rest of GCC warn about this variable as it should
2670 not be visible to the user. */
2671 TREE_NO_WARNING (var) = 1;
2672
2673 declare_inline_vars (id->block, var);
2674
2675 /* Build the use expr. If the return type of the function was
2676 promoted, convert it back to the expected type. */
2677 use = var;
2678 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2679 use = fold_convert (caller_type, var);
2680
2681 STRIP_USELESS_TYPE_CONVERSION (use);
2682
2683 if (DECL_BY_REFERENCE (result))
2684 {
2685 TREE_ADDRESSABLE (var) = 1;
2686 var = build_fold_addr_expr (var);
2687 }
2688
2689 done:
2690 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2691 way, when the RESULT_DECL is encountered, it will be
2692 automatically replaced by the VAR_DECL. */
2693 insert_decl_map (id, result, var);
2694
2695 /* Remember this so we can ignore it in remap_decls. */
2696 id->retvar = var;
2697
2698 *use_p = use;
2699 return var;
2700 }
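
/* Two illustrative outcomes of the logic above (a sketch):

   - For `y = f ();' where `y' is a non-addressable local DECL of the
     right type, MODIFY_DEST is `y' itself, so the callee's RESULT_DECL
     is mapped straight to `y' and no temporary is created.

   - For `s = f ();' compiled with the return slot optimization,
     RETURN_SLOT is `s', so returns inside the callee store directly
     into `s' (or through `*&s' when the result is returned by
     reference).  */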
2701
2702 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2703 to a local label. */
2704
2705 static tree
2706 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2707 {
2708 tree node = *nodep;
2709 tree fn = (tree) fnp;
2710
2711 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2712 return node;
2713
2714 if (TYPE_P (node))
2715 *walk_subtrees = 0;
2716
2717 return NULL_TREE;
2718 }
2719
2720 /* Callback through walk_tree. Determine if we've got an aggregate
2721 type that we can't support; return non-null if so. */
2722
2723 static tree
2724 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2725 void *data ATTRIBUTE_UNUSED)
2726 {
2727 tree t, node = *nodep;
2728
2729 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2730 {
2731 /* We cannot inline a function of the form
2732
2733 void F (int i) { struct S { int ar[i]; } s; }
2734
2735 Attempting to do so produces a catch-22.
2736 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2737 UNION_TYPE nodes, then it goes into infinite recursion on a
2738 structure containing a pointer to its own type. If it doesn't,
2739 then the type node for S doesn't get adjusted properly when
2740 F is inlined.
2741
2742 ??? This is likely no longer true, but it's too late in the 4.0
2743 cycle to try to find out. This should be checked for 4.1. */
2744 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2745 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2746 return node;
2747 }
2748
2749 return NULL_TREE;
2750 }
2751
2752
2753 /* Determine if the function can be copied. If so, return NULL. If
2754 not, return a string describing the reason for failure. */
2755
2756 static const char *
2757 copy_forbidden (struct function *fun, tree fndecl)
2758 {
2759 const char *reason = fun->cannot_be_copied_reason;
2760 tree step;
2761
2762 /* Only examine the function once. */
2763 if (fun->cannot_be_copied_set)
2764 return reason;
2765
2766 /* We cannot copy a function that receives a non-local goto
2767 because we cannot remap the destination label used in the
2768 function that is performing the non-local goto. */
2769 /* ??? Actually, this should be possible, if we work at it.
2770 No doubt there's just a handful of places that simply
2771 assume it doesn't happen and don't substitute properly. */
2772 if (fun->has_nonlocal_label)
2773 {
2774 reason = G_("function %q+F can never be copied "
2775 "because it receives a non-local goto");
2776 goto fail;
2777 }
2778
2779 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2780 {
2781 tree decl = TREE_VALUE (step);
2782
2783 if (TREE_CODE (decl) == VAR_DECL
2784 && TREE_STATIC (decl)
2785 && !DECL_EXTERNAL (decl)
2786 && DECL_INITIAL (decl)
2787 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2788 has_label_address_in_static_1,
2789 fndecl))
2790 {
2791 reason = G_("function %q+F can never be copied because it saves "
2792 "address of local label in a static variable");
2793 goto fail;
2794 }
2795
2796 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2797 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2798 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2799 cannot_copy_type_1, NULL))
2800 {
2801 reason = G_("function %q+F can never be copied "
2802 "because it uses variable sized variables");
2803 goto fail;
2804 }
2805 }
2806
2807 fail:
2808 fun->cannot_be_copied_reason = reason;
2809 fun->cannot_be_copied_set = true;
2810 return reason;
2811 }
2812
2813
2814 static const char *inline_forbidden_reason;
2815
2816 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2817 iff a function cannot be inlined. Also sets the reason why. */
2818
2819 static tree
2820 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2821 struct walk_stmt_info *wip)
2822 {
2823 tree fn = (tree) wip->info;
2824 tree t;
2825 gimple stmt = gsi_stmt (*gsi);
2826
2827 switch (gimple_code (stmt))
2828 {
2829 case GIMPLE_CALL:
2830 /* Refuse to inline an alloca call unless the user explicitly forced it,
2831 as this may change the program's memory overhead drastically when the
2832 function using alloca is called in a loop. In the GCC version present
2833 in SPEC2000, inlining into schedule_block caused it to require 2GB of
2834 RAM instead of 256MB. */
2835 if (gimple_alloca_call_p (stmt)
2836 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2837 {
2838 inline_forbidden_reason
2839 = G_("function %q+F can never be inlined because it uses "
2840 "alloca (override using the always_inline attribute)");
2841 *handled_ops_p = true;
2842 return fn;
2843 }
2844
2845 t = gimple_call_fndecl (stmt);
2846 if (t == NULL_TREE)
2847 break;
2848
2849 /* We cannot inline functions that call setjmp. */
2850 if (setjmp_call_p (t))
2851 {
2852 inline_forbidden_reason
2853 = G_("function %q+F can never be inlined because it uses setjmp");
2854 *handled_ops_p = true;
2855 return t;
2856 }
2857
2858 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2859 switch (DECL_FUNCTION_CODE (t))
2860 {
2861 /* We cannot inline functions that take a variable number of
2862 arguments. */
2863 case BUILT_IN_VA_START:
2864 case BUILT_IN_NEXT_ARG:
2865 case BUILT_IN_VA_END:
2866 inline_forbidden_reason
2867 = G_("function %q+F can never be inlined because it "
2868 "uses variable argument lists");
2869 *handled_ops_p = true;
2870 return t;
2871
2872 case BUILT_IN_LONGJMP:
2873 /* We can't inline functions that call __builtin_longjmp at
2874 all. The non-local goto machinery really requires the
2875 destination be in a different function. If we allow the
2876 function calling __builtin_longjmp to be inlined into the
2877 function calling __builtin_setjmp, Things will Go Awry. */
2878 inline_forbidden_reason
2879 = G_("function %q+F can never be inlined because "
2880 "it uses setjmp-longjmp exception handling");
2881 *handled_ops_p = true;
2882 return t;
2883
2884 case BUILT_IN_NONLOCAL_GOTO:
2885 /* Similarly. */
2886 inline_forbidden_reason
2887 = G_("function %q+F can never be inlined because "
2888 "it uses non-local goto");
2889 *handled_ops_p = true;
2890 return t;
2891
2892 case BUILT_IN_RETURN:
2893 case BUILT_IN_APPLY_ARGS:
2894 /* If a __builtin_apply_args caller were inlined,
2895 it would be saving arguments of the function it has
2896 been inlined into. Similarly, __builtin_return would
2897 return from the function it has been inlined into. */
2898 inline_forbidden_reason
2899 = G_("function %q+F can never be inlined because "
2900 "it uses __builtin_return or __builtin_apply_args");
2901 *handled_ops_p = true;
2902 return t;
2903
2904 default:
2905 break;
2906 }
2907 break;
2908
2909 case GIMPLE_GOTO:
2910 t = gimple_goto_dest (stmt);
2911
2912 /* We will not inline a function which uses computed goto. The
2913 addresses of its local labels, which may be tucked into
2914 global storage, are of course not constant across
2915 instantiations, which causes unexpected behavior. */
2916 if (TREE_CODE (t) != LABEL_DECL)
2917 {
2918 inline_forbidden_reason
2919 = G_("function %q+F can never be inlined "
2920 "because it contains a computed goto");
2921 *handled_ops_p = true;
2922 return t;
2923 }
2924 break;
2925
2926 default:
2927 break;
2928 }
2929
2930 *handled_ops_p = false;
2931 return NULL_TREE;
2932 }
2933
2934 /* Return true if FNDECL is a function that cannot be inlined into
2935 another one. */
2936
2937 static bool
2938 inline_forbidden_p (tree fndecl)
2939 {
2940 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2941 struct walk_stmt_info wi;
2942 struct pointer_set_t *visited_nodes;
2943 basic_block bb;
2944 bool forbidden_p = false;
2945
2946 /* First check for shared reasons not to copy the code. */
2947 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2948 if (inline_forbidden_reason != NULL)
2949 return true;
2950
2951 /* Next, walk the statements of the function looking for
2952 constructs we can't handle, or that are non-optimal for inlining. */
2953 visited_nodes = pointer_set_create ();
2954 memset (&wi, 0, sizeof (wi));
2955 wi.info = (void *) fndecl;
2956 wi.pset = visited_nodes;
2957
2958 FOR_EACH_BB_FN (bb, fun)
2959 {
2960 gimple ret;
2961 gimple_seq seq = bb_seq (bb);
2962 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2963 forbidden_p = (ret != NULL);
2964 if (forbidden_p)
2965 break;
2966 }
2967
2968 pointer_set_destroy (visited_nodes);
2969 return forbidden_p;
2970 }
2971
2972 /* Returns nonzero if FN is a function that does not have any
2973 fundamental inline blocking properties. */
2974
2975 bool
2976 tree_inlinable_function_p (tree fn)
2977 {
2978 bool inlinable = true;
2979 bool do_warning;
2980 tree always_inline;
2981
2982 /* If we've already decided this function shouldn't be inlined,
2983 there's no need to check again. */
2984 if (DECL_UNINLINABLE (fn))
2985 return false;
2986
2987 /* We only warn for functions declared `inline' by the user. */
2988 do_warning = (warn_inline
2989 && DECL_DECLARED_INLINE_P (fn)
2990 && !DECL_NO_INLINE_WARNING_P (fn)
2991 && !DECL_IN_SYSTEM_HEADER (fn));
2992
2993 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2994
2995 if (flag_no_inline
2996 && always_inline == NULL)
2997 {
2998 if (do_warning)
2999 warning (OPT_Winline, "function %q+F can never be inlined because it "
3000 "is suppressed using -fno-inline", fn);
3001 inlinable = false;
3002 }
3003
3004 /* Don't auto-inline anything that might not be bound within
3005 this unit of translation. */
3006 else if (!DECL_DECLARED_INLINE_P (fn)
3007 && DECL_REPLACEABLE_P (fn))
3008 inlinable = false;
3009
3010 else if (!function_attribute_inlinable_p (fn))
3011 {
3012 if (do_warning)
3013 warning (OPT_Winline, "function %q+F can never be inlined because it "
3014 "uses attributes conflicting with inlining", fn);
3015 inlinable = false;
3016 }
3017
3018 else if (inline_forbidden_p (fn))
3019 {
3020 /* See if we should warn about uninlinable functions. Previously,
3021 some of these warnings would be issued while trying to expand
3022 the function inline, but that would cause multiple warnings
3023 about functions that would for example call alloca. But since
3024 this is a property of the function, just one warning is enough.
3025 As a bonus we can now give more details about the reason why a
3026 function is not inlinable. */
3027 if (always_inline)
3028 sorry (inline_forbidden_reason, fn);
3029 else if (do_warning)
3030 warning (OPT_Winline, inline_forbidden_reason, fn);
3031
3032 inlinable = false;
3033 }
3034
3035 /* Squirrel away the result so that we don't have to check again. */
3036 DECL_UNINLINABLE (fn) = !inlinable;
3037
3038 return inlinable;
3039 }
3040
3041 /* Estimate the cost of a memory move. Use the machine-dependent
3042 word size and take a possible memcpy call into account. */
3043
3044 int
3045 estimate_move_cost (tree type)
3046 {
3047 HOST_WIDE_INT size;
3048
3049 gcc_assert (!VOID_TYPE_P (type));
3050
3051 size = int_size_in_bytes (type);
3052
3053 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3054 /* Cost of a memcpy call, 3 arguments and the call. */
3055 return 4;
3056 else
3057 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3058 }
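
/* Worked example (the target parameters are hypothetical): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 16-byte struct costs
   (16 + 7) / 8 == 2 units, whereas a 64-byte struct exceeds the
   8 * 4 == 32 byte threshold and is charged 4 units for the implied
   memcpy call.  */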
3059
3060 /* Returns the cost of operation CODE, according to WEIGHTS. */
3061
3062 static int
3063 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3064 tree op1 ATTRIBUTE_UNUSED, tree op2)
3065 {
3066 switch (code)
3067 {
3068 /* These are "free" conversions, or their presumed cost
3069 is folded into other operations. */
3070 case RANGE_EXPR:
3071 CASE_CONVERT:
3072 case COMPLEX_EXPR:
3073 case PAREN_EXPR:
3074 return 0;
3075
3076 /* Assign cost of 1 to usual operations.
3077 ??? We may consider mapping RTL costs to this. */
3078 case COND_EXPR:
3079 case VEC_COND_EXPR:
3080
3081 case PLUS_EXPR:
3082 case POINTER_PLUS_EXPR:
3083 case MINUS_EXPR:
3084 case MULT_EXPR:
3085
3086 case FIXED_CONVERT_EXPR:
3087 case FIX_TRUNC_EXPR:
3088
3089 case NEGATE_EXPR:
3090 case FLOAT_EXPR:
3091 case MIN_EXPR:
3092 case MAX_EXPR:
3093 case ABS_EXPR:
3094
3095 case LSHIFT_EXPR:
3096 case RSHIFT_EXPR:
3097 case LROTATE_EXPR:
3098 case RROTATE_EXPR:
3099 case VEC_LSHIFT_EXPR:
3100 case VEC_RSHIFT_EXPR:
3101
3102 case BIT_IOR_EXPR:
3103 case BIT_XOR_EXPR:
3104 case BIT_AND_EXPR:
3105 case BIT_NOT_EXPR:
3106
3107 case TRUTH_ANDIF_EXPR:
3108 case TRUTH_ORIF_EXPR:
3109 case TRUTH_AND_EXPR:
3110 case TRUTH_OR_EXPR:
3111 case TRUTH_XOR_EXPR:
3112 case TRUTH_NOT_EXPR:
3113
3114 case LT_EXPR:
3115 case LE_EXPR:
3116 case GT_EXPR:
3117 case GE_EXPR:
3118 case EQ_EXPR:
3119 case NE_EXPR:
3120 case ORDERED_EXPR:
3121 case UNORDERED_EXPR:
3122
3123 case UNLT_EXPR:
3124 case UNLE_EXPR:
3125 case UNGT_EXPR:
3126 case UNGE_EXPR:
3127 case UNEQ_EXPR:
3128 case LTGT_EXPR:
3129
3130 case CONJ_EXPR:
3131
3132 case PREDECREMENT_EXPR:
3133 case PREINCREMENT_EXPR:
3134 case POSTDECREMENT_EXPR:
3135 case POSTINCREMENT_EXPR:
3136
3137 case REALIGN_LOAD_EXPR:
3138
3139 case REDUC_MAX_EXPR:
3140 case REDUC_MIN_EXPR:
3141 case REDUC_PLUS_EXPR:
3142 case WIDEN_SUM_EXPR:
3143 case WIDEN_MULT_EXPR:
3144 case DOT_PROD_EXPR:
3145
3146 case VEC_WIDEN_MULT_HI_EXPR:
3147 case VEC_WIDEN_MULT_LO_EXPR:
3148 case VEC_UNPACK_HI_EXPR:
3149 case VEC_UNPACK_LO_EXPR:
3150 case VEC_UNPACK_FLOAT_HI_EXPR:
3151 case VEC_UNPACK_FLOAT_LO_EXPR:
3152 case VEC_PACK_TRUNC_EXPR:
3153 case VEC_PACK_SAT_EXPR:
3154 case VEC_PACK_FIX_TRUNC_EXPR:
3155 case VEC_EXTRACT_EVEN_EXPR:
3156 case VEC_EXTRACT_ODD_EXPR:
3157 case VEC_INTERLEAVE_HIGH_EXPR:
3158 case VEC_INTERLEAVE_LOW_EXPR:
3159
3160 return 1;
3161
3162 /* A few special cases of expensive operations. This is useful
3163 to avoid inlining functions having too many of these. */
3164 case TRUNC_DIV_EXPR:
3165 case CEIL_DIV_EXPR:
3166 case FLOOR_DIV_EXPR:
3167 case ROUND_DIV_EXPR:
3168 case EXACT_DIV_EXPR:
3169 case TRUNC_MOD_EXPR:
3170 case CEIL_MOD_EXPR:
3171 case FLOOR_MOD_EXPR:
3172 case ROUND_MOD_EXPR:
3173 case RDIV_EXPR:
3174 if (TREE_CODE (op2) != INTEGER_CST)
3175 return weights->div_mod_cost;
3176 return 1;
3177
3178 default:
3179 /* We expect a copy assignment with no operator. */
3180 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3181 return 0;
3182 }
3183 }
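
/* For example (a sketch of the cost model above): `a_1 / b_2' with a
   non-constant divisor is charged WEIGHTS->div_mod_cost, `a_1 / 4'
   costs 1 like an ordinary arithmetic operation, and conversions
   handled by CASE_CONVERT are treated as free.  */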
3184
3185
3186 /* Estimate number of instructions that will be created by expanding
3187 the statements in the statement sequence STMTS.
3188 WEIGHTS contains weights attributed to various constructs. */
3189
3190 static int
3191 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3192 {
3193 int cost;
3194 gimple_stmt_iterator gsi;
3195
3196 cost = 0;
3197 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3198 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3199
3200 return cost;
3201 }
3202
3203
3204 /* Estimate number of instructions that will be created by expanding STMT.
3205 WEIGHTS contains weights attributed to various constructs. */
3206
3207 int
3208 estimate_num_insns (gimple stmt, eni_weights *weights)
3209 {
3210 unsigned cost, i;
3211 enum gimple_code code = gimple_code (stmt);
3212 tree lhs;
3213 tree rhs;
3214
3215 switch (code)
3216 {
3217 case GIMPLE_ASSIGN:
3218 /* Try to estimate the cost of assignments. We have two cases to
3219 deal with:
3220 1) Simple assignments to registers;
3221 2) Stores to things that must live in memory. This includes
3222 "normal" stores to scalars, but also assignments of large
3223 structures, or constructors of big arrays;
3224
3225 Let us look at these two cases, assuming we have "a = b + C":
3226 <GIMPLE_ASSIGN <var_decl "a">
3227 <plus_expr <var_decl "b"> <constant C>>
3228 If "a" is a GIMPLE register, the assignment to it is free on almost
3229 any target, because "a" usually ends up in a real register. Hence
3230 the only cost of this expression comes from the PLUS_EXPR, and we
3231 can ignore the GIMPLE_ASSIGN.
3232 If "a" is not a GIMPLE register, the assignment to "a" will most
3233 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3234 of moving something into "a", which we compute using the function
3235 estimate_move_cost. */
3236 lhs = gimple_assign_lhs (stmt);
3237 rhs = gimple_assign_rhs1 (stmt);
3238
3239 if (is_gimple_reg (lhs))
3240 cost = 0;
3241 else
3242 cost = estimate_move_cost (TREE_TYPE (lhs));
3243
3244 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3245 cost += estimate_move_cost (TREE_TYPE (rhs));
3246
3247 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3248 gimple_assign_rhs1 (stmt),
3249 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3250 == GIMPLE_BINARY_RHS
3251 ? gimple_assign_rhs2 (stmt) : NULL);
3252 break;
3253
3254 case GIMPLE_COND:
3255 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3256 gimple_op (stmt, 0),
3257 gimple_op (stmt, 1));
3258 break;
3259
3260 case GIMPLE_SWITCH:
3261 /* Take into account the cost of the switch plus a guess of 2 conditional
3262 jumps for each case label.
3263 
3264 TODO: once the switch expansion logic is sufficiently separated, we can
3265 do a better job of estimating the cost of the switch. */
3266 if (weights->time_based)
3267 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3268 else
3269 cost = gimple_switch_num_labels (stmt) * 2;
3270 break;
3271
3272 case GIMPLE_CALL:
3273 {
3274 tree decl = gimple_call_fndecl (stmt);
3275 tree addr = gimple_call_fn (stmt);
3276 tree funtype = TREE_TYPE (addr);
3277
3278 if (POINTER_TYPE_P (funtype))
3279 funtype = TREE_TYPE (funtype);
3280
3281 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3282 cost = weights->target_builtin_call_cost;
3283 else
3284 cost = weights->call_cost;
3285
3286 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3287 switch (DECL_FUNCTION_CODE (decl))
3288 {
3289 case BUILT_IN_CONSTANT_P:
3290 return 0;
3291 case BUILT_IN_EXPECT:
3292 return 0;
3293
3294 /* Prefetch instruction is not expensive. */
3295 case BUILT_IN_PREFETCH:
3296 cost = weights->target_builtin_call_cost;
3297 break;
3298
3299 default:
3300 break;
3301 }
3302
3303 if (decl)
3304 funtype = TREE_TYPE (decl);
3305
3306 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3307 cost += estimate_move_cost (TREE_TYPE (funtype));
3308 /* Our cost must be kept in sync with
3309 cgraph_estimate_size_after_inlining, which uses the function
3310 declaration to figure out the arguments. */
3311 if (decl && DECL_ARGUMENTS (decl))
3312 {
3313 tree arg;
3314 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3315 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3316 cost += estimate_move_cost (TREE_TYPE (arg));
3317 }
3318 else if (funtype && prototype_p (funtype))
3319 {
3320 tree t;
3321 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3322 t = TREE_CHAIN (t))
3323 if (!VOID_TYPE_P (TREE_VALUE (t)))
3324 cost += estimate_move_cost (TREE_VALUE (t));
3325 }
3326 else
3327 {
3328 for (i = 0; i < gimple_call_num_args (stmt); i++)
3329 {
3330 tree arg = gimple_call_arg (stmt, i);
3331 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3332 cost += estimate_move_cost (TREE_TYPE (arg));
3333 }
3334 }
3335
3336 break;
3337 }
3338
3339 case GIMPLE_GOTO:
3340 case GIMPLE_LABEL:
3341 case GIMPLE_NOP:
3342 case GIMPLE_PHI:
3343 case GIMPLE_RETURN:
3344 case GIMPLE_PREDICT:
3345 case GIMPLE_DEBUG:
3346 return 0;
3347
3348 case GIMPLE_ASM:
3349 return 1;
3350
3351 case GIMPLE_RESX:
3352 /* This is either going to be an external function call with one
3353 argument, or two register copy statements plus a goto. */
3354 return 2;
3355
3356 case GIMPLE_EH_DISPATCH:
3357 /* ??? This is going to turn into a switch statement. Ideally
3358 we'd have a look at the eh region and estimate the number of
3359 edges involved. */
3360 return 10;
3361
3362 case GIMPLE_BIND:
3363 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3364
3365 case GIMPLE_EH_FILTER:
3366 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3367
3368 case GIMPLE_CATCH:
3369 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3370
3371 case GIMPLE_TRY:
3372 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3373 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3374
3375 /* OpenMP directives are generally very expensive. */
3376
3377 case GIMPLE_OMP_RETURN:
3378 case GIMPLE_OMP_SECTIONS_SWITCH:
3379 case GIMPLE_OMP_ATOMIC_STORE:
3380 case GIMPLE_OMP_CONTINUE:
3381 /* ...except these, which are cheap. */
3382 return 0;
3383
3384 case GIMPLE_OMP_ATOMIC_LOAD:
3385 return weights->omp_cost;
3386
3387 case GIMPLE_OMP_FOR:
3388 return (weights->omp_cost
3389 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3390 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3391
3392 case GIMPLE_OMP_PARALLEL:
3393 case GIMPLE_OMP_TASK:
3394 case GIMPLE_OMP_CRITICAL:
3395 case GIMPLE_OMP_MASTER:
3396 case GIMPLE_OMP_ORDERED:
3397 case GIMPLE_OMP_SECTION:
3398 case GIMPLE_OMP_SECTIONS:
3399 case GIMPLE_OMP_SINGLE:
3400 return (weights->omp_cost
3401 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3402
3403 default:
3404 gcc_unreachable ();
3405 }
3406
3407 return cost;
3408 }
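
/* A worked example for the GIMPLE_CALL case above (a sketch assuming a
   target where an `int' moves in a single piece): for

     r_2 = f (a_1, b_3);

   with `int f (int, int)' visible as a declaration, the estimate is
   WEIGHTS->call_cost plus one move for the returned `int' and one move
   per `int' argument, i.e. call_cost + 3.  */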
3409
3410 /* Estimate number of instructions that will be created by expanding
3411 function FNDECL. WEIGHTS contains weights attributed to various
3412 constructs. */
3413
3414 int
3415 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3416 {
3417 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3418 gimple_stmt_iterator bsi;
3419 basic_block bb;
3420 int n = 0;
3421
3422 gcc_assert (my_function && my_function->cfg);
3423 FOR_EACH_BB_FN (bb, my_function)
3424 {
3425 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3426 n += estimate_num_insns (gsi_stmt (bsi), weights);
3427 }
3428
3429 return n;
3430 }
3431
3432
3433 /* Initializes weights used by estimate_num_insns. */
3434
3435 void
3436 init_inline_once (void)
3437 {
3438 eni_size_weights.call_cost = 1;
3439 eni_size_weights.target_builtin_call_cost = 1;
3440 eni_size_weights.div_mod_cost = 1;
3441 eni_size_weights.omp_cost = 40;
3442 eni_size_weights.time_based = false;
3443
3444 /* Estimating the time for a call is difficult, since we have no idea what the
3445 called function does. In the current uses of eni_time_weights,
3446 underestimating the cost does less harm than overestimating it, so
3447 we choose a rather small value here. */
3448 eni_time_weights.call_cost = 10;
3449 eni_time_weights.target_builtin_call_cost = 10;
3450 eni_time_weights.div_mod_cost = 10;
3451 eni_time_weights.omp_cost = 40;
3452 eni_time_weights.time_based = true;
3453 }
3454
3455 /* Estimate the number of instructions in a gimple_seq. */
3456
3457 int
3458 count_insns_seq (gimple_seq seq, eni_weights *weights)
3459 {
3460 gimple_stmt_iterator gsi;
3461 int n = 0;
3462 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3463 n += estimate_num_insns (gsi_stmt (gsi), weights);
3464
3465 return n;
3466 }
3467
3468
3469 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3470
3471 static void
3472 prepend_lexical_block (tree current_block, tree new_block)
3473 {
3474 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3475 BLOCK_SUBBLOCKS (current_block) = new_block;
3476 BLOCK_SUPERCONTEXT (new_block) = current_block;
3477 }
3478
3479 /* Fetch the callee declaration from the call graph edge going from NODE and
3480 associated with the call statement STMT. Return NULL_TREE if not found. */
3481 static tree
3482 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3483 {
3484 struct cgraph_edge *cs;
3485
3486 cs = cgraph_edge (node, stmt);
3487 if (cs)
3488 return cs->callee->decl;
3489
3490 return NULL_TREE;
3491 }
3492
3493 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3494
3495 static bool
3496 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3497 {
3498 tree retvar, use_retvar;
3499 tree fn;
3500 struct pointer_map_t *st, *dst;
3501 tree return_slot;
3502 tree modify_dest;
3503 location_t saved_location;
3504 struct cgraph_edge *cg_edge;
3505 cgraph_inline_failed_t reason;
3506 basic_block return_block;
3507 edge e;
3508 gimple_stmt_iterator gsi, stmt_gsi;
3509 bool successfully_inlined = FALSE;
3510 bool purge_dead_abnormal_edges;
3511 tree t_step;
3512 tree var;
3513
3514 /* Set input_location here so we get the right instantiation context
3515 if we call instantiate_decl from inlinable_function_p. */
3516 saved_location = input_location;
3517 if (gimple_has_location (stmt))
3518 input_location = gimple_location (stmt);
3519
3520 /* From here on, we're only interested in CALL_EXPRs. */
3521 if (gimple_code (stmt) != GIMPLE_CALL)
3522 goto egress;
3523
3524 /* First, see if we can figure out what function is being called.
3525 If we cannot, then there is no hope of inlining the function. */
3526 fn = gimple_call_fndecl (stmt);
3527 if (!fn)
3528 {
3529 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3530 if (!fn)
3531 goto egress;
3532 }
3533
3534 /* Turn forward declarations into real ones. */
3535 fn = cgraph_node (fn)->decl;
3536
3537 /* If FN is a declaration of a function in a nested scope that was
3538 globally declared inline, we don't set its DECL_INITIAL.
3539 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3540 C++ front-end uses it for cdtors to refer to their internal
3541 declarations, which are not real functions. Fortunately those
3542 don't have trees to be saved, so we can tell by checking their
3543 gimple_body. */
3544 if (!DECL_INITIAL (fn)
3545 && DECL_ABSTRACT_ORIGIN (fn)
3546 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3547 fn = DECL_ABSTRACT_ORIGIN (fn);
3548
3549 /* Objective C and Fortran still call tree_rest_of_compilation directly.
3550 Kill this check once this is fixed. */
3551 if (!id->dst_node->analyzed)
3552 goto egress;
3553
3554 cg_edge = cgraph_edge (id->dst_node, stmt);
3555
3556 /* Don't inline functions with different EH personalities. */
3557 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3558 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3559 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3560 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3561 goto egress;
3562
3563 /* Don't try to inline functions that are not well-suited to
3564 inlining. */
3565 if (!cgraph_inline_p (cg_edge, &reason))
3566 {
3567 /* If this call was originally indirect, we do not want to emit any
3568 inlining related warnings or sorry messages because there are no
3569 guarantees regarding those. */
3570 if (cg_edge->indirect_call)
3571 goto egress;
3572
3573 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3574 /* Avoid warnings during early inline pass. */
3575 && cgraph_global_info_ready)
3576 {
3577 sorry ("inlining failed in call to %q+F: %s", fn,
3578 cgraph_inline_failed_string (reason));
3579 sorry ("called from here");
3580 }
3581 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3582 && !DECL_IN_SYSTEM_HEADER (fn)
3583 && reason != CIF_UNSPECIFIED
3584 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3585 /* Avoid warnings during early inline pass. */
3586 && cgraph_global_info_ready)
3587 {
3588 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3589 fn, cgraph_inline_failed_string (reason));
3590 warning (OPT_Winline, "called from here");
3591 }
3592 goto egress;
3593 }
3594 fn = cg_edge->callee->decl;
3595
3596 #ifdef ENABLE_CHECKING
3597 if (cg_edge->callee->decl != id->dst_node->decl)
3598 verify_cgraph_node (cg_edge->callee);
3599 #endif
3600
3601 /* We will be inlining this callee. */
3602 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3603
3604 /* Update the caller's EH personality. */
3605 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3606 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3607 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3608
3609 /* Split the block holding the GIMPLE_CALL. */
3610 e = split_block (bb, stmt);
3611 bb = e->src;
3612 return_block = e->dest;
3613 remove_edge (e);
3614
3615 /* split_block splits after the statement; work around this by
3616 moving the call into the second block manually. Not pretty,
3617 but seems easier than doing the CFG manipulation by hand
3618    when the GIMPLE_CALL is the last statement of BB. */
3619 stmt_gsi = gsi_last_bb (bb);
3620 gsi_remove (&stmt_gsi, false);
3621
3622 /* If the GIMPLE_CALL was the last statement of BB, it may have
3623 been the source of abnormal edges. In this case, schedule
3624 the removal of dead abnormal edges. */
3625 gsi = gsi_start_bb (return_block);
3626 if (gsi_end_p (gsi))
3627 {
3628 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3629 purge_dead_abnormal_edges = true;
3630 }
3631 else
3632 {
3633 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3634 purge_dead_abnormal_edges = false;
3635 }
3636
3637 stmt_gsi = gsi_start_bb (return_block);
3638
3639 /* Build a block containing code to initialize the arguments, the
3640 actual inline expansion of the body, and a label for the return
3641 statements within the function to jump to. The type of the
3642 statement expression is the return type of the function call. */
3643 id->block = make_node (BLOCK);
3644 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3645 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3646 prepend_lexical_block (gimple_block (stmt), id->block);
3647
3648 /* Local declarations will be replaced by their equivalents in this
3649 map. */
3650 st = id->decl_map;
3651 id->decl_map = pointer_map_create ();
3652 dst = id->debug_map;
3653 id->debug_map = NULL;
3654
3655 /* Record the function we are about to inline. */
3656 id->src_fn = fn;
3657 id->src_node = cg_edge->callee;
3658 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3659 id->gimple_call = stmt;
3660
3661 gcc_assert (!id->src_cfun->after_inlining);
3662
3663 id->entry_bb = bb;
3664 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3665 {
3666 gimple_stmt_iterator si = gsi_last_bb (bb);
3667 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3668 NOT_TAKEN),
3669 GSI_NEW_STMT);
3670 }
3671 initialize_inlined_parameters (id, stmt, fn, bb);
3672
3673 if (DECL_INITIAL (fn))
3674 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3675
3676 /* Return statements in the function body will be replaced by jumps
3677 to the RET_LABEL. */
3678 gcc_assert (DECL_INITIAL (fn));
3679 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3680
3681 /* Find the LHS to which the result of this call is assigned. */
3682 return_slot = NULL;
3683 if (gimple_call_lhs (stmt))
3684 {
3685 modify_dest = gimple_call_lhs (stmt);
3686
3687 /* The function which we are inlining might not return a value,
3688 in which case we should issue a warning that the function
3689 does not return a value. In that case the optimizers will
3690 see that the variable to which the value is assigned was not
3691 initialized. We do not want to issue a warning about that
3692 uninitialized variable. */
3693 if (DECL_P (modify_dest))
3694 TREE_NO_WARNING (modify_dest) = 1;
3695
3696 if (gimple_call_return_slot_opt_p (stmt))
3697 {
3698 return_slot = modify_dest;
3699 modify_dest = NULL;
3700 }
3701 }
3702 else
3703 modify_dest = NULL;
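/* To illustrate the cases above: for 'a = foo ()', MODIFY_DEST is 'a'; if
   the call uses the return slot optimization (an aggregate returned directly
   into its destination), RETURN_SLOT takes over and MODIFY_DEST becomes
   NULL; and for a call whose value is ignored, both stay NULL.  */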
3704
3705 /* If we are inlining a call to the C++ operator new, we don't want
3706 to use type based alias analysis on the return value. Otherwise
3707 we may get confused if the compiler sees that the inlined new
3708 function returns a pointer which was just deleted. See bug
3709 33407. */
3710 if (DECL_IS_OPERATOR_NEW (fn))
3711 {
3712 return_slot = NULL;
3713 modify_dest = NULL;
3714 }
3715
3716 /* Declare the return variable for the function. */
3717 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
3718
3719 /* Add the local variables of this inlined callee to the caller. */
3720 t_step = id->src_cfun->local_decls;
3721 for (; t_step; t_step = TREE_CHAIN (t_step))
3722 {
3723 var = TREE_VALUE (t_step);
3724 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3725 {
3726 if (var_ann (var) && add_referenced_var (var))
3727 cfun->local_decls = tree_cons (NULL_TREE, var,
3728 cfun->local_decls);
3729 }
3730 else if (!can_be_nonlocal (var, id))
3731 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3732 cfun->local_decls);
3733 }
3734
3735 /* This is it. Duplicate the callee body. Assume callee is
3736 pre-gimplified. Note that we must not alter the caller
3737 function in any way before this point, as this CALL_EXPR may be
3738 a self-referential call; if we're calling ourselves, we need to
3739 duplicate our body before altering anything. */
3740 copy_body (id, bb->count, bb->frequency, bb, return_block);
3741
3742 /* Reset the escaped and callused solutions. */
3743 if (cfun->gimple_df)
3744 {
3745 pt_solution_reset (&cfun->gimple_df->escaped);
3746 pt_solution_reset (&cfun->gimple_df->callused);
3747 }
3748
3749 /* Clean up. */
3750 if (id->debug_map)
3751 {
3752 pointer_map_destroy (id->debug_map);
3753 id->debug_map = dst;
3754 }
3755 pointer_map_destroy (id->decl_map);
3756 id->decl_map = st;
3757
3758 /* Unlink the call's virtual operands before replacing it. */
3759 unlink_stmt_vdef (stmt);
3760
3761 /* If the inlined function returns a result that we care about,
3762 substitute the GIMPLE_CALL with an assignment of the return
3763 variable to the LHS of the call. That is, if STMT was
3764 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3765 if (use_retvar && gimple_call_lhs (stmt))
3766 {
3767 gimple old_stmt = stmt;
3768 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3769 gsi_replace (&stmt_gsi, stmt, false);
3770 if (gimple_in_ssa_p (cfun))
3771 mark_symbols_for_renaming (stmt);
3772 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3773 }
3774 else
3775 {
3776 /* Handle the case of inlining a function with no return
3777 statement, which causes the return value to become undefined. */
3778 if (gimple_call_lhs (stmt)
3779 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3780 {
3781 tree name = gimple_call_lhs (stmt);
3782 tree var = SSA_NAME_VAR (name);
3783 tree def = gimple_default_def (cfun, var);
3784
3785 if (def)
3786 {
3787 /* If the variable is used undefined, make this name
3788 undefined via a move. */
3789 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3790 gsi_replace (&stmt_gsi, stmt, true);
3791 }
3792 else
3793 {
3794 /* Otherwise make this variable undefined. */
3795 gsi_remove (&stmt_gsi, true);
3796 set_default_def (var, name);
3797 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3798 }
3799 }
3800 else
3801 gsi_remove (&stmt_gsi, true);
3802 }
3803
3804 if (purge_dead_abnormal_edges)
3805 gimple_purge_dead_abnormal_call_edges (return_block);
3806
3807 /* If the value of the new expression is ignored, that's OK. We
3808 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3809 the equivalent inlined version either. */
3810 if (is_gimple_assign (stmt))
3811 {
3812 gcc_assert (gimple_assign_single_p (stmt)
3813 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3814 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3815 }
3816
3817 /* Output the inlining info for this abstract function, since it has been
3818 inlined. If we don't do this now, we can lose the information about the
3819 variables in the function when the blocks get blown away as soon as we
3820 remove the cgraph node. */
3821 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3822
3823 /* Update callgraph if needed. */
3824 cgraph_remove_node (cg_edge->callee);
3825
3826 id->block = NULL_TREE;
3827 successfully_inlined = TRUE;
3828
3829 egress:
3830 input_location = saved_location;
3831 return successfully_inlined;
3832 }
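/* As a rough sketch of the transformation performed above, a caller block

     <bb 2>:  ...  a_1 = foo (x_2);  ...

   is split around the call and ends up approximately as

     <bb 2>:    ...  <argument setup: foo's remapped parameter = x_2>
     <bb 3..n>: <copy of foo's body, with returns turned into assignments
                 to the return variable and edges to the return block>
     <bb n+1>:  a_1 = <return variable>;  ...

   with the callee's lexical blocks, local variables and EH regions remapped
   into the caller.  */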
3833
3834 /* Expand any calls in basic block BB that can be inlined.  Return true
3835    as soon as a call has been successfully inlined; the statements that
3836    follow the inlined call are moved into a new block, which the caller
3837    (optimize_inline_calls) reaches later as it walks the basic blocks in
3838    forward order. */
3839
3840 static bool
3841 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3842 {
3843 gimple_stmt_iterator gsi;
3844
3845 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3846 {
3847 gimple stmt = gsi_stmt (gsi);
3848
3849 if (is_gimple_call (stmt)
3850 && expand_call_inline (bb, stmt, id))
3851 return true;
3852 }
3853
3854 return false;
3855 }
3856
3857
3858 /* Walk all basic blocks created after FIRST and try to fold every statement
3859 in the STATEMENTS pointer set. */
3860
3861 static void
3862 fold_marked_statements (int first, struct pointer_set_t *statements)
3863 {
3864 for (; first < n_basic_blocks; first++)
3865 if (BASIC_BLOCK (first))
3866 {
3867 gimple_stmt_iterator gsi;
3868
3869 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3870 !gsi_end_p (gsi);
3871 gsi_next (&gsi))
3872 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3873 {
3874 gimple old_stmt = gsi_stmt (gsi);
3875 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3876
3877 if (old_decl && DECL_BUILT_IN (old_decl))
3878 {
3879 /* Folding builtins can create multiple instructions;
3880    we need to look at all of them. */
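/* For instance, folding 'res = strcpy (dst, src)' when the length of SRC
   is known may leave behind a memcpy call followed by a separate assignment
   computing RES, so every statement between I2 and GSI needs updating, not
   just the one at GSI.  */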
3881 gimple_stmt_iterator i2 = gsi;
3882 gsi_prev (&i2);
3883 if (fold_stmt (&gsi))
3884 {
3885 gimple new_stmt;
3886 if (gsi_end_p (i2))
3887 i2 = gsi_start_bb (BASIC_BLOCK (first));
3888 else
3889 gsi_next (&i2);
3890 while (1)
3891 {
3892 new_stmt = gsi_stmt (i2);
3893 update_stmt (new_stmt);
3894 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3895 new_stmt);
3896
3897 if (new_stmt == gsi_stmt (gsi))
3898 {
3899 /* It is okay to check only the very last of
3900    these statements.  If it is a throwing
3901    statement nothing will change.  If it isn't,
3902    this can only remove EH edges.  The only way
3903    that could be wrong is if some intermediate
3904    statement throws but the last one does not;
3905    that would mean we'd have to split the block,
3906    which we can't do here, and we'd lose the
3907    folding anyway.  And since builtins probably
3908    never throw, this is all moot anyway. */
3909 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3910 new_stmt))
3911 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3912 break;
3913 }
3914 gsi_next (&i2);
3915 }
3916 }
3917 }
3918 else if (fold_stmt (&gsi))
3919 {
3920 /* Re-read the statement from GSI as fold_stmt() may
3921 have changed it. */
3922 gimple new_stmt = gsi_stmt (gsi);
3923 update_stmt (new_stmt);
3924
3925 if (is_gimple_call (old_stmt)
3926 || is_gimple_call (new_stmt))
3927 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3928 new_stmt);
3929
3930 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3931 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3932 }
3933 }
3934 }
3935 }
3936
3937 /* Return true if BB has at least one abnormal outgoing edge. */
3938
3939 static inline bool
3940 has_abnormal_outgoing_edge_p (basic_block bb)
3941 {
3942 edge e;
3943 edge_iterator ei;
3944
3945 FOR_EACH_EDGE (e, ei, bb->succs)
3946 if (e->flags & EDGE_ABNORMAL)
3947 return true;
3948
3949 return false;
3950 }
3951
3952 /* Expand calls to inline functions in the body of FN. */
3953
3954 unsigned int
3955 optimize_inline_calls (tree fn)
3956 {
3957 copy_body_data id;
3958 tree prev_fn;
3959 basic_block bb;
3960 int last = n_basic_blocks;
3961 struct gimplify_ctx gctx;
3962
3963 /* There is no point in performing inlining if errors have already
3964 occurred -- and we might crash if we try to inline invalid
3965 code. */
3966 if (errorcount || sorrycount)
3967 return 0;
3968
3969 /* Clear out ID. */
3970 memset (&id, 0, sizeof (id));
3971
3972 id.src_node = id.dst_node = cgraph_node (fn);
3973 id.dst_fn = fn;
3974 /* Or any functions that aren't finished yet. */
3975 prev_fn = NULL_TREE;
3976 if (current_function_decl)
3977 {
3978 id.dst_fn = current_function_decl;
3979 prev_fn = current_function_decl;
3980 }
3981
3982 id.copy_decl = copy_decl_maybe_to_var;
3983 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3984 id.transform_new_cfg = false;
3985 id.transform_return_to_modify = true;
3986 id.transform_lang_insert_block = NULL;
3987 id.statements_to_fold = pointer_set_create ();
3988
3989 push_gimplify_context (&gctx);
3990
3991 /* We make no attempts to keep dominance info up-to-date. */
3992 free_dominance_info (CDI_DOMINATORS);
3993 free_dominance_info (CDI_POST_DOMINATORS);
3994
3995 /* Register specific gimple functions. */
3996 gimple_register_cfg_hooks ();
3997
3998 /* Reach the trees by walking over the CFG, and note the
3999 enclosing basic-blocks in the call edges. */
4000 /* We walk the blocks going forward, because inlining a call splits the
4001    block that holds it, and the new blocks making up the inlined body
4002    follow it; we'll trudge through them, processing their call
4003    statements along the way. */
4004 FOR_EACH_BB (bb)
4005 gimple_expand_calls_inline (bb, &id);
4006
4007 pop_gimplify_context (NULL);
4008
4009 #ifdef ENABLE_CHECKING
4010 {
4011 struct cgraph_edge *e;
4012
4013 verify_cgraph_node (id.dst_node);
4014
4015 /* Double check that we inlined everything we are supposed to inline. */
4016 for (e = id.dst_node->callees; e; e = e->next_callee)
4017 gcc_assert (e->inline_failed);
4018 }
4019 #endif
4020
4021 /* Fold the statements before compacting/renumbering the basic blocks. */
4022 fold_marked_statements (last, id.statements_to_fold);
4023 pointer_set_destroy (id.statements_to_fold);
4024
4025 gcc_assert (!id.debug_stmts);
4026
4027 /* Renumber the (code) basic_blocks consecutively. */
4028 compact_blocks ();
4029 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4030 number_blocks (fn);
4031
4032 fold_cond_expr_cond ();
4033 delete_unreachable_blocks_update_callgraph (&id);
4034 #ifdef ENABLE_CHECKING
4035 verify_cgraph_node (id.dst_node);
4036 #endif
4037
4038 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4039    not possible yet - the IPA passes might make various functions non-throwing
4040    without bothering to proactively update the local EH info.  This is done
4041    later in the fixup_cfg pass, which also executes the verification. */
4042 return (TODO_update_ssa
4043 | TODO_cleanup_cfg
4044 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4045 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4046 }
4047
4048 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4049
4050 tree
4051 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4052 {
4053 enum tree_code code = TREE_CODE (*tp);
4054 enum tree_code_class cl = TREE_CODE_CLASS (code);
4055
4056 /* We make copies of most nodes. */
4057 if (IS_EXPR_CODE_CLASS (cl)
4058 || code == TREE_LIST
4059 || code == TREE_VEC
4060 || code == TYPE_DECL
4061 || code == OMP_CLAUSE)
4062 {
4063 /* Because the chain gets clobbered when we make a copy, we save it
4064 here. */
4065 tree chain = NULL_TREE, new_tree;
4066
4067 chain = TREE_CHAIN (*tp);
4068
4069 /* Copy the node. */
4070 new_tree = copy_node (*tp);
4071
4072 /* Propagate mudflap marked-ness. */
4073 if (flag_mudflap && mf_marked_p (*tp))
4074 mf_mark (new_tree);
4075
4076 *tp = new_tree;
4077
4078 /* Now, restore the chain, if appropriate. That will cause
4079 walk_tree to walk into the chain as well. */
4080 if (code == PARM_DECL
4081 || code == TREE_LIST
4082 || code == OMP_CLAUSE)
4083 TREE_CHAIN (*tp) = chain;
4084
4085 /* For now, we don't update BLOCKs when we make copies. So, we
4086 have to nullify all BIND_EXPRs. */
4087 if (TREE_CODE (*tp) == BIND_EXPR)
4088 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4089 }
4090 else if (code == CONSTRUCTOR)
4091 {
4092 /* CONSTRUCTOR nodes need special handling because
4093 we need to duplicate the vector of elements. */
4094 tree new_tree;
4095
4096 new_tree = copy_node (*tp);
4097
4098 /* Propagate mudflap marked-ness. */
4099 if (flag_mudflap && mf_marked_p (*tp))
4100 mf_mark (new_tree);
4101
4102 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4103 CONSTRUCTOR_ELTS (*tp));
4104 *tp = new_tree;
4105 }
4106 else if (TREE_CODE_CLASS (code) == tcc_type)
4107 *walk_subtrees = 0;
4108 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4109 *walk_subtrees = 0;
4110 else if (TREE_CODE_CLASS (code) == tcc_constant)
4111 *walk_subtrees = 0;
4112 else
4113 gcc_assert (code != STATEMENT_LIST);
4114 return NULL_TREE;
4115 }
4116
4117 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4118    information indicating to what new SAVE_EXPR this one should be mapped,
4119    use that one. Otherwise, create a new node and enter it in ST so that
4120    subsequent copies of the same SAVE_EXPR map to it. */
4121
4122 static void
4123 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4124 {
4125 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4126 tree *n;
4127 tree t;
4128
4129 /* See if we already encountered this SAVE_EXPR. */
4130 n = (tree *) pointer_map_contains (st, *tp);
4131
4132 /* If we didn't already remap this SAVE_EXPR, do so now. */
4133 if (!n)
4134 {
4135 t = copy_node (*tp);
4136
4137 /* Remember this SAVE_EXPR. */
4138 *pointer_map_insert (st, *tp) = t;
4139 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4140 *pointer_map_insert (st, t) = t;
4141 }
4142 else
4143 {
4144 /* We've already walked into this SAVE_EXPR; don't do it again. */
4145 *walk_subtrees = 0;
4146 t = *n;
4147 }
4148
4149 /* Replace this SAVE_EXPR with the copy. */
4150 *tp = t;
4151 }
4152
4153 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
4154    copies the label declaration and enters it in the decl map of DATA
4155    (which is really a `copy_body_data *'). */
4156
4157 static tree
4158 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4159 void *data)
4160 {
4161 copy_body_data *id = (copy_body_data *) data;
4162
4163 /* Don't walk into types. */
4164 if (TYPE_P (*tp))
4165 *walk_subtrees = 0;
4166
4167 else if (TREE_CODE (*tp) == LABEL_EXPR)
4168 {
4169 tree decl = TREE_OPERAND (*tp, 0);
4170
4171 /* Copy the decl and remember the copy. */
4172 insert_decl_map (id, decl, id->copy_decl (decl, id));
4173 }
4174
4175 return NULL_TREE;
4176 }
4177
4178 /* Perform any modifications to EXPR required when it is unsaved. Does
4179 not recurse into EXPR's subtrees. */
4180
4181 static void
4182 unsave_expr_1 (tree expr)
4183 {
4184 switch (TREE_CODE (expr))
4185 {
4186 case TARGET_EXPR:
4187 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4188 It's OK for this to happen if it was part of a subtree that
4189 isn't immediately expanded, such as operand 2 of another
4190 TARGET_EXPR. */
4191 if (TREE_OPERAND (expr, 1))
4192 break;
4193
4194 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4195 TREE_OPERAND (expr, 3) = NULL_TREE;
4196 break;
4197
4198 default:
4199 break;
4200 }
4201 }
4202
4203 /* Called via walk_tree when an expression is unsaved. Using the
4204    decl map in DATA (which is really a `copy_body_data *'),
4205    remaps all local declarations to appropriate replacements. */
4206
4207 static tree
4208 unsave_r (tree *tp, int *walk_subtrees, void *data)
4209 {
4210 copy_body_data *id = (copy_body_data *) data;
4211 struct pointer_map_t *st = id->decl_map;
4212 tree *n;
4213
4214 /* Only a local declaration (variable or label). */
4215 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4216 || TREE_CODE (*tp) == LABEL_DECL)
4217 {
4218 /* Lookup the declaration. */
4219 n = (tree *) pointer_map_contains (st, *tp);
4220
4221 /* If it's there, remap it. */
4222 if (n)
4223 *tp = *n;
4224 }
4225
4226 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4227 gcc_unreachable ();
4228 else if (TREE_CODE (*tp) == BIND_EXPR)
4229 copy_bind_expr (tp, walk_subtrees, id);
4230 else if (TREE_CODE (*tp) == SAVE_EXPR
4231 || TREE_CODE (*tp) == TARGET_EXPR)
4232 remap_save_expr (tp, st, walk_subtrees);
4233 else
4234 {
4235 copy_tree_r (tp, walk_subtrees, NULL);
4236
4237 /* Do whatever unsaving is required. */
4238 unsave_expr_1 (*tp);
4239 }
4240
4241 /* Keep iterating. */
4242 return NULL_TREE;
4243 }
4244
4245 /* Copies everything in EXPR and replaces variables, labels
4246 and SAVE_EXPRs local to EXPR. */
4247
4248 tree
4249 unsave_expr_now (tree expr)
4250 {
4251 copy_body_data id;
4252
4253 /* There's nothing to do for NULL_TREE. */
4254 if (expr == 0)
4255 return expr;
4256
4257 /* Set up ID. */
4258 memset (&id, 0, sizeof (id));
4259 id.src_fn = current_function_decl;
4260 id.dst_fn = current_function_decl;
4261 id.decl_map = pointer_map_create ();
4262 id.debug_map = NULL;
4263
4264 id.copy_decl = copy_decl_no_change;
4265 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4266 id.transform_new_cfg = false;
4267 id.transform_return_to_modify = false;
4268 id.transform_lang_insert_block = NULL;
4269
4270 /* Walk the tree once to find local labels. */
4271 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4272
4273 /* Walk the tree again, copying, remapping, and unsaving. */
4274 walk_tree (&expr, unsave_r, &id, NULL);
4275
4276 /* Clean up. */
4277 pointer_map_destroy (id.decl_map);
4278 if (id.debug_map)
4279 pointer_map_destroy (id.debug_map);
4280
4281 return expr;
4282 }
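/* A rough intuition for the above: a front end calls unsave_expr_now when an
   expression (for instance a cleanup expression) must be usable for a second,
   independent evaluation.  Giving the copy its own SAVE_EXPRs and un-expanded
   TARGET_EXPRs ensures it does not reuse the temporaries already produced for
   the original tree.  */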
4283
4284 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4285    label, copies the label declaration and enters it in the decl map of DATA
4286    (which is really a 'copy_body_data *'). */
4287
4288 static tree
4289 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4290 bool *handled_ops_p ATTRIBUTE_UNUSED,
4291 struct walk_stmt_info *wi)
4292 {
4293 copy_body_data *id = (copy_body_data *) wi->info;
4294 gimple stmt = gsi_stmt (*gsip);
4295
4296 if (gimple_code (stmt) == GIMPLE_LABEL)
4297 {
4298 tree decl = gimple_label_label (stmt);
4299
4300 /* Copy the decl and remember the copy. */
4301 insert_decl_map (id, decl, id->copy_decl (decl, id));
4302 }
4303
4304 return NULL_TREE;
4305 }
4306
4307
4308 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4309    Using the decl map pointed to by ST (really a `struct pointer_map_t *'),
4310    remaps all local declarations to appropriate replacements in gimple
4311    operands. */
4312
4313 static tree
4314 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4315 {
4316 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4317 copy_body_data *id = (copy_body_data *) wi->info;
4318 struct pointer_map_t *st = id->decl_map;
4319 tree *n;
4320 tree expr = *tp;
4321
4322 /* Only a local declaration (variable or label). */
4323 if ((TREE_CODE (expr) == VAR_DECL
4324 && !TREE_STATIC (expr))
4325 || TREE_CODE (expr) == LABEL_DECL)
4326 {
4327 /* Lookup the declaration. */
4328 n = (tree *) pointer_map_contains (st, expr);
4329
4330 /* If it's there, remap it. */
4331 if (n)
4332 *tp = *n;
4333 *walk_subtrees = 0;
4334 }
4335 else if (TREE_CODE (expr) == STATEMENT_LIST
4336 || TREE_CODE (expr) == BIND_EXPR
4337 || TREE_CODE (expr) == SAVE_EXPR)
4338 gcc_unreachable ();
4339 else if (TREE_CODE (expr) == TARGET_EXPR)
4340 {
4341 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4342 It's OK for this to happen if it was part of a subtree that
4343 isn't immediately expanded, such as operand 2 of another
4344 TARGET_EXPR. */
4345 if (!TREE_OPERAND (expr, 1))
4346 {
4347 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4348 TREE_OPERAND (expr, 3) = NULL_TREE;
4349 }
4350 }
4351
4352 /* Keep iterating. */
4353 return NULL_TREE;
4354 }
4355
4356
4357 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4358    Using the decl map stored in the copy_body_data passed in WI->info,
4359    remaps all local declarations to appropriate replacements in gimple
4360    statements. */
4361
4362 static tree
4363 replace_locals_stmt (gimple_stmt_iterator *gsip,
4364 bool *handled_ops_p ATTRIBUTE_UNUSED,
4365 struct walk_stmt_info *wi)
4366 {
4367 copy_body_data *id = (copy_body_data *) wi->info;
4368 gimple stmt = gsi_stmt (*gsip);
4369
4370 if (gimple_code (stmt) == GIMPLE_BIND)
4371 {
4372 tree block = gimple_bind_block (stmt);
4373
4374 if (block)
4375 {
4376 remap_block (&block, id);
4377 gimple_bind_set_block (stmt, block);
4378 }
4379
4380 /* This will remap a lot of the same decls again, but this should be
4381 harmless. */
4382 if (gimple_bind_vars (stmt))
4383 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4384 }
4385
4386 /* Keep iterating. */
4387 return NULL_TREE;
4388 }
4389
4390
4391 /* Copies everything in SEQ and replaces variables and labels local to
4392 current_function_decl. */
4393
4394 gimple_seq
4395 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4396 {
4397 copy_body_data id;
4398 struct walk_stmt_info wi;
4399 struct pointer_set_t *visited;
4400 gimple_seq copy;
4401
4402 /* There's nothing to do for an empty sequence. */
4403 if (seq == NULL)
4404 return seq;
4405
4406 /* Set up ID. */
4407 memset (&id, 0, sizeof (id));
4408 id.src_fn = current_function_decl;
4409 id.dst_fn = current_function_decl;
4410 id.decl_map = pointer_map_create ();
4411 id.debug_map = NULL;
4412
4413 id.copy_decl = copy_decl_no_change;
4414 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4415 id.transform_new_cfg = false;
4416 id.transform_return_to_modify = false;
4417 id.transform_lang_insert_block = NULL;
4418
4419 /* Walk the tree once to find local labels. */
4420 memset (&wi, 0, sizeof (wi));
4421 visited = pointer_set_create ();
4422 wi.info = &id;
4423 wi.pset = visited;
4424 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4425 pointer_set_destroy (visited);
4426
4427 copy = gimple_seq_copy (seq);
4428
4429 /* Walk the copy, remapping decls. */
4430 memset (&wi, 0, sizeof (wi));
4431 wi.info = &id;
4432 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4433
4434 /* Clean up. */
4435 pointer_map_destroy (id.decl_map);
4436 if (id.debug_map)
4437 pointer_map_destroy (id.debug_map);
4438
4439 return copy;
4440 }
4441
4442
4443 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4444
4445 static tree
4446 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4447 {
4448 if (*tp == data)
4449 return (tree) data;
4450 else
4451 return NULL;
4452 }
4453
4454 bool
4455 debug_find_tree (tree top, tree search)
4456 {
4457 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4458 }
4459
4460
4461 /* Declare the variables created by the inliner. Add all the variables in
4462    VARS to BLOCK. */
4463
4464 static void
4465 declare_inline_vars (tree block, tree vars)
4466 {
4467 tree t;
4468 for (t = vars; t; t = TREE_CHAIN (t))
4469 {
4470 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4471 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4472 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4473 }
4474
4475 if (block)
4476 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4477 }
4478
4479 /* Finish up the copy COPY of DECL. The DECL originally was in ID->src_fn,
4480    but the copy will live in ID->dst_fn. Adjust the copy's flags, debug
4481    information and context accordingly. */
4482
4483 static tree
4484 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4485 {
4486 /* Don't generate debug information for the copy if we wouldn't have
4487    generated it for the original either. */
4488 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4489 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4490
4491 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4492 declaration inspired this copy. */
4493 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4494
4495 /* The new variable/label has no RTL, yet. */
4496 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4497 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4498 SET_DECL_RTL (copy, NULL_RTX);
4499
4500 /* These args would always appear unused, if not for this. */
4501 TREE_USED (copy) = 1;
4502
4503 /* Set the context for the new declaration. */
4504 if (!DECL_CONTEXT (decl))
4505 /* Globals stay global. */
4506 ;
4507 else if (DECL_CONTEXT (decl) != id->src_fn)
4508 /* Things that weren't in the scope of the function we're inlining
4509 from aren't in the scope we're inlining to, either. */
4510 ;
4511 else if (TREE_STATIC (decl))
4512 /* Function-scoped static variables should stay in the original
4513 function. */
4514 ;
4515 else
4516 /* Ordinary automatic local variables are now in the scope of the
4517 new function. */
4518 DECL_CONTEXT (copy) = id->dst_fn;
4519
4520 return copy;
4521 }
4522
4523 static tree
4524 copy_decl_to_var (tree decl, copy_body_data *id)
4525 {
4526 tree copy, type;
4527
4528 gcc_assert (TREE_CODE (decl) == PARM_DECL
4529 || TREE_CODE (decl) == RESULT_DECL);
4530
4531 type = TREE_TYPE (decl);
4532
4533 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4534 VAR_DECL, DECL_NAME (decl), type);
4535 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4536 TREE_READONLY (copy) = TREE_READONLY (decl);
4537 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4538 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4539
4540 return copy_decl_for_dup_finish (id, decl, copy);
4541 }
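/* For illustration, when inlining 'int foo (int x)', the PARM_DECL X is
   copied into a caller-local VAR_DECL that keeps the name 'x', the type
   'int' and the addressability/volatility flags; copy_decl_for_dup_finish
   then points its DECL_ABSTRACT_ORIGIN back at the original parameter so
   the debugger can still identify it.  */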
4542
4543 /* Like copy_decl_to_var, but create a return slot object instead of a
4544 pointer variable for return by invisible reference. */
4545
4546 static tree
4547 copy_result_decl_to_var (tree decl, copy_body_data *id)
4548 {
4549 tree copy, type;
4550
4551 gcc_assert (TREE_CODE (decl) == PARM_DECL
4552 || TREE_CODE (decl) == RESULT_DECL);
4553
4554 type = TREE_TYPE (decl);
4555 if (DECL_BY_REFERENCE (decl))
4556 type = TREE_TYPE (type);
4557
4558 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4559 VAR_DECL, DECL_NAME (decl), type);
4560 TREE_READONLY (copy) = TREE_READONLY (decl);
4561 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4562 if (!DECL_BY_REFERENCE (decl))
4563 {
4564 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4565 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4566 }
4567
4568 return copy_decl_for_dup_finish (id, decl, copy);
4569 }
4570
4571 tree
4572 copy_decl_no_change (tree decl, copy_body_data *id)
4573 {
4574 tree copy;
4575
4576 copy = copy_node (decl);
4577
4578 /* The COPY is not abstract; it will be generated in DST_FN. */
4579 DECL_ABSTRACT (copy) = 0;
4580 lang_hooks.dup_lang_specific_decl (copy);
4581
4582 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4583 been taken; it's for internal bookkeeping in expand_goto_internal. */
4584 if (TREE_CODE (copy) == LABEL_DECL)
4585 {
4586 TREE_ADDRESSABLE (copy) = 0;
4587 LABEL_DECL_UID (copy) = -1;
4588 }
4589
4590 return copy_decl_for_dup_finish (id, decl, copy);
4591 }
4592
4593 static tree
4594 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4595 {
4596 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4597 return copy_decl_to_var (decl, id);
4598 else
4599 return copy_decl_no_change (decl, id);
4600 }
4601
4602 /* Return a copy of the function's argument tree. */
4603 static tree
4604 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4605 bitmap args_to_skip, tree *vars)
4606 {
4607 tree arg, *parg;
4608 tree new_parm = NULL;
4609 int i = 0;
4610
4611 parg = &new_parm;
4612
4613 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4614 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4615 {
4616 tree new_tree = remap_decl (arg, id);
4617 lang_hooks.dup_lang_specific_decl (new_tree);
4618 *parg = new_tree;
4619 parg = &TREE_CHAIN (new_tree);
4620 }
4621 else if (!pointer_map_contains (id->decl_map, arg))
4622 {
4623 /* Make an equivalent VAR_DECL. If the argument was used
4624    as a temporary variable later in the function, its uses
4625    will be replaced by this local variable. */
4626 tree var = copy_decl_to_var (arg, id);
4627 get_var_ann (var);
4628 add_referenced_var (var);
4629 insert_decl_map (id, arg, var);
4630 /* Declare this new variable. */
4631 TREE_CHAIN (var) = *vars;
4632 *vars = var;
4633 }
4634 return new_parm;
4635 }
4636
4637 /* Return a copy of the function's static chain. */
4638 static tree
4639 copy_static_chain (tree static_chain, copy_body_data * id)
4640 {
4641 tree *chain_copy, *pvar;
4642
4643 chain_copy = &static_chain;
4644 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4645 {
4646 tree new_tree = remap_decl (*pvar, id);
4647 lang_hooks.dup_lang_specific_decl (new_tree);
4648 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4649 *pvar = new_tree;
4650 }
4651 return static_chain;
4652 }
4653
4654 /* Return true if the function is allowed to be versioned.
4655 This is a guard for the versioning functionality. */
4656
4657 bool
4658 tree_versionable_function_p (tree fndecl)
4659 {
4660 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4661 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4662 }
4663
4664 /* Delete all unreachable basic blocks and update the callgraph.
4665    Doing so is somewhat nontrivial because we need to update all clones and
4666    remove inline functions that become unreachable. */
4667
4668 static bool
4669 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4670 {
4671 bool changed = false;
4672 basic_block b, next_bb;
4673
4674 find_unreachable_blocks ();
4675
4676 /* Delete all unreachable basic blocks. */
4677
4678 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4679 {
4680 next_bb = b->next_bb;
4681
4682 if (!(b->flags & BB_REACHABLE))
4683 {
4684 gimple_stmt_iterator bsi;
4685
4686 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4687 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4688 {
4689 struct cgraph_edge *e;
4690 struct cgraph_node *node;
4691
4692 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4693 {
4694 if (!e->inline_failed)
4695 cgraph_remove_node_and_inline_clones (e->callee);
4696 else
4697 cgraph_remove_edge (e);
4698 }
4699 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4700 && id->dst_node->clones)
4701 for (node = id->dst_node->clones; node != id->dst_node;)
4702 {
4703 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4704 {
4705 if (!e->inline_failed)
4706 cgraph_remove_node_and_inline_clones (e->callee);
4707 else
4708 cgraph_remove_edge (e);
4709 }
4710
4711 if (node->clones)
4712 node = node->clones;
4713 else if (node->next_sibling_clone)
4714 node = node->next_sibling_clone;
4715 else
4716 {
4717 while (node != id->dst_node && !node->next_sibling_clone)
4718 node = node->clone_of;
4719 if (node != id->dst_node)
4720 node = node->next_sibling_clone;
4721 }
4722 }
4723 }
4724 delete_basic_block (b);
4725 changed = true;
4726 }
4727 }
4728
4729 if (changed)
4730 tidy_fallthru_edges ();
4731 #ifdef ENABLE_CHECKING
4732 verify_cgraph_node (id->dst_node);
4733 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4734 && id->dst_node->clones)
4735 {
4736 struct cgraph_node *node;
4737 for (node = id->dst_node->clones; node != id->dst_node;)
4738 {
4739 verify_cgraph_node (node);
4740
4741 if (node->clones)
4742 node = node->clones;
4743 else if (node->next_sibling_clone)
4744 node = node->next_sibling_clone;
4745 else
4746 {
4747 while (node != id->dst_node && !node->next_sibling_clone)
4748 node = node->clone_of;
4749 if (node != id->dst_node)
4750 node = node->next_sibling_clone;
4751 }
4752 }
4753 }
4754 #endif
4755 return changed;
4756 }
4757
4758 /* Update clone info after duplication. */
4759
4760 static void
4761 update_clone_info (copy_body_data * id)
4762 {
4763 struct cgraph_node *node;
4764 if (!id->dst_node->clones)
4765 return;
4766 for (node = id->dst_node->clones; node != id->dst_node;)
4767 {
4768 /* First update replace maps to match the new body. */
4769 if (node->clone.tree_map)
4770 {
4771 unsigned int i;
4772 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4773 {
4774 struct ipa_replace_map *replace_info;
4775 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4776 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4777 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4778 }
4779 }
4780 if (node->clones)
4781 node = node->clones;
4782 else if (node->next_sibling_clone)
4783 node = node->next_sibling_clone;
4784 else
4785 {
4786 while (node != id->dst_node && !node->next_sibling_clone)
4787 node = node->clone_of;
4788 if (node != id->dst_node)
4789 node = node->next_sibling_clone;
4790 }
4791 }
4792 }
4793
4794 /* Create a copy of a function's tree.
4795    OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes of the original
4796    function and of the new copied function, respectively.  If we want to
4797    replace a DECL tree with another tree while duplicating the function's
4798    body, TREE_MAP represents the mapping between those trees.  If
4799    UPDATE_CLONES is set, the call_stmt fields of the edges of clones of
4800    the function will be updated.  Parameters whose indices are set in
4801    ARGS_TO_SKIP are dropped from the new function's argument list. */
4802 void
4803 tree_function_versioning (tree old_decl, tree new_decl,
4804 VEC(ipa_replace_map_p,gc)* tree_map,
4805 bool update_clones, bitmap args_to_skip)
4806 {
4807 struct cgraph_node *old_version_node;
4808 struct cgraph_node *new_version_node;
4809 copy_body_data id;
4810 tree p;
4811 unsigned i;
4812 struct ipa_replace_map *replace_info;
4813 basic_block old_entry_block, bb;
4814 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4815
4816 tree t_step;
4817 tree old_current_function_decl = current_function_decl;
4818 tree vars = NULL_TREE;
4819
4820 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4821 && TREE_CODE (new_decl) == FUNCTION_DECL);
4822 DECL_POSSIBLY_INLINED (old_decl) = 1;
4823
4824 old_version_node = cgraph_node (old_decl);
4825 new_version_node = cgraph_node (new_decl);
4826
4827 /* Output the inlining info for this abstract function, since it has been
4828 inlined. If we don't do this now, we can lose the information about the
4829 variables in the function when the blocks get blown away as soon as we
4830 remove the cgraph node. */
4831 (*debug_hooks->outlining_inline_function) (old_decl);
4832
4833 DECL_ARTIFICIAL (new_decl) = 1;
4834 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4835 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4836
4837 /* Prepare the data structures for the tree copy. */
4838 memset (&id, 0, sizeof (id));
4839
4840 /* Generate a new name for the new version. */
4841 id.statements_to_fold = pointer_set_create ();
4842
4843 id.decl_map = pointer_map_create ();
4844 id.debug_map = NULL;
4845 id.src_fn = old_decl;
4846 id.dst_fn = new_decl;
4847 id.src_node = old_version_node;
4848 id.dst_node = new_version_node;
4849 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4850
4851 id.copy_decl = copy_decl_no_change;
4852 id.transform_call_graph_edges
4853 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4854 id.transform_new_cfg = true;
4855 id.transform_return_to_modify = false;
4856 id.transform_lang_insert_block = NULL;
4857
4858 current_function_decl = new_decl;
4859 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4860 (DECL_STRUCT_FUNCTION (old_decl));
4861 initialize_cfun (new_decl, old_decl,
4862 old_entry_block->count,
4863 old_entry_block->frequency);
4864 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4865
4866 /* Copy the function's static chain. */
4867 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4868 if (p)
4869 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4870 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4871 &id);
4872
4873 /* If there's a tree_map, prepare for substitution. */
4874 if (tree_map)
4875 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4876 {
4877 gimple init;
4878 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4879 if (replace_info->replace_p)
4880 {
4881 tree op = replace_info->new_tree;
4882
4883 STRIP_NOPS (op);
4884
4885 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4886 op = TREE_OPERAND (op, 0);
4887
4888 if (TREE_CODE (op) == ADDR_EXPR)
4889 {
4890 op = TREE_OPERAND (op, 0);
4891 while (handled_component_p (op))
4892 op = TREE_OPERAND (op, 0);
4893 if (TREE_CODE (op) == VAR_DECL)
4894 add_referenced_var (op);
4895 }
4896 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4897 init = setup_one_parameter (&id, replace_info->old_tree,
4898 replace_info->new_tree, id.src_fn,
4899 NULL,
4900 &vars);
4901 if (init)
4902 VEC_safe_push (gimple, heap, init_stmts, init);
4903 }
4904 }
4905 /* Copy the function's arguments. */
4906 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4907 DECL_ARGUMENTS (new_decl) =
4908 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4909 args_to_skip, &vars);
4910
4911 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4912
4913 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4914 number_blocks (id.dst_fn);
4915
4916 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4917
4918 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4919 /* Add local vars. */
4920 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4921 t_step; t_step = TREE_CHAIN (t_step))
4922 {
4923 tree var = TREE_VALUE (t_step);
4924 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4925 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4926 else if (!can_be_nonlocal (var, &id))
4927 cfun->local_decls =
4928 tree_cons (NULL_TREE, remap_decl (var, &id),
4929 cfun->local_decls);
4930 }
4931
4932 /* Copy the function's body. */
4933 copy_body (&id, old_entry_block->count, old_entry_block->frequency,
4934 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4935
4936 if (DECL_RESULT (old_decl) != NULL_TREE)
4937 {
4938 tree *res_decl = &DECL_RESULT (old_decl);
4939 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4940 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4941 }
4942
4943 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4944 number_blocks (new_decl);
4945
4946 /* We want to create the BB unconditionally, so that the addition of
4947 debug stmts doesn't affect BB count, which may in the end cause
4948 codegen differences. */
4949 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4950 while (VEC_length (gimple, init_stmts))
4951 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4952 update_clone_info (&id);
4953
4954 /* Remap the nonlocal_goto_save_area, if any. */
4955 if (cfun->nonlocal_goto_save_area)
4956 {
4957 struct walk_stmt_info wi;
4958
4959 memset (&wi, 0, sizeof (wi));
4960 wi.info = &id;
4961 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4962 }
4963
4964 /* Clean up. */
4965 pointer_map_destroy (id.decl_map);
4966 if (id.debug_map)
4967 pointer_map_destroy (id.debug_map);
4968 free_dominance_info (CDI_DOMINATORS);
4969 free_dominance_info (CDI_POST_DOMINATORS);
4970
4971 fold_marked_statements (0, id.statements_to_fold);
4972 pointer_set_destroy (id.statements_to_fold);
4973 fold_cond_expr_cond ();
4974 delete_unreachable_blocks_update_callgraph (&id);
4975 update_ssa (TODO_update_ssa);
4976 free_dominance_info (CDI_DOMINATORS);
4977 free_dominance_info (CDI_POST_DOMINATORS);
4978
4979 gcc_assert (!id.debug_stmts);
4980 VEC_free (gimple, heap, init_stmts);
4981 pop_cfun ();
4982 current_function_decl = old_current_function_decl;
4983 gcc_assert (!current_function_decl
4984 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
4985 return;
4986 }
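/* As a simplified example of the machinery above, a versioning client such
   as the IPA constant-propagation pass can pass a TREE_MAP containing a
   single entry mapping a PARM_DECL 'n' to the constant 5, together with an
   ARGS_TO_SKIP bitmap with bit 0 set; the resulting NEW_DECL then receives
   a copy of OLD_DECL's body in which 'n' is initialized to 5 and the first
   formal parameter no longer exists.  */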
4987
4988 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
4989    the callee and return the inlined body on success. */
4990
4991 tree
4992 maybe_inline_call_in_expr (tree exp)
4993 {
4994 tree fn = get_callee_fndecl (exp);
4995
4996 /* We can only try to inline "const" functions. */
4997 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4998 {
4999 struct pointer_map_t *decl_map = pointer_map_create ();
5000 call_expr_arg_iterator iter;
5001 copy_body_data id;
5002 tree param, arg, t;
5003
5004 /* Remap the parameters. */
5005 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5006 param;
5007 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5008 *pointer_map_insert (decl_map, param) = arg;
5009
5010 memset (&id, 0, sizeof (id));
5011 id.src_fn = fn;
5012 id.dst_fn = current_function_decl;
5013 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5014 id.decl_map = decl_map;
5015
5016 id.copy_decl = copy_decl_no_change;
5017 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5018 id.transform_new_cfg = false;
5019 id.transform_return_to_modify = true;
5020 id.transform_lang_insert_block = false;
5021
5022 /* Make sure not to unshare trees behind the front-end's back
5023 since front-end specific mechanisms may rely on sharing. */
5024 id.regimplify = false;
5025 id.do_not_unshare = true;
5026
5027 /* We're not inside any EH region. */
5028 id.eh_lp_nr = 0;
5029
5030 t = copy_tree_body (&id);
5031 pointer_map_destroy (decl_map);
5032
5033 /* We can only return something suitable for use in a GENERIC
5034 expression tree. */
5035 if (TREE_CODE (t) == MODIFY_EXPR)
5036 return TREE_OPERAND (t, 1);
5037 }
5038
5039 return NULL_TREE;
5040 }
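/* A minimal sketch of when this fires: for a callee declared
   __attribute__((const)), such as
     static int sq (int x) { return x * x; }
   a GENERIC tree containing the call 'sq (3)' can be replaced by the copied
   body expression '3 * 3' (the right-hand side of the MODIFY_EXPR produced
   for the return value), because each PARM_DECL was mapped directly onto the
   corresponding call argument.  */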
5041
5042 /* Duplicate a type, fields and all. */
5043
5044 tree
5045 build_duplicate_type (tree type)
5046 {
5047 struct copy_body_data id;
5048
5049 memset (&id, 0, sizeof (id));
5050 id.src_fn = current_function_decl;
5051 id.dst_fn = current_function_decl;
5052 id.src_cfun = cfun;
5053 id.decl_map = pointer_map_create ();
5054 id.debug_map = NULL;
5055 id.copy_decl = copy_decl_no_change;
5056
5057 type = remap_type_1 (type, &id);
5058
5059 pointer_map_destroy (id.decl_map);
5060 if (id.debug_map)
5061 pointer_map_destroy (id.debug_map);
5062
5063 TYPE_CANONICAL (type) = type;
5064
5065 return type;
5066 }
5067
5068 /* Return false if it is not safe to inline the callee of call edge E, e.g.
5069    because the callee uses different target-specific options than the caller
5070    or because the call-site argument types mismatch the parameter types. */
5071 bool
5072 tree_can_inline_p (struct cgraph_edge *e)
5073 {
5074 #if 0
5075 /* This causes a regression in SPEC in that it prevents a cold function from
5076 inlining a hot function. Perhaps this should only apply to functions
5077 that the user declares hot/cold/optimize explicitly. */
5078
5079 /* Don't inline a function with a higher optimization level than the
5080 caller, or with different space constraints (hot/cold functions). */
5081 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5082 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5083
5084 if (caller_tree != callee_tree)
5085 {
5086 struct cl_optimization *caller_opt
5087 = TREE_OPTIMIZATION ((caller_tree)
5088 ? caller_tree
5089 : optimization_default_node);
5090
5091 struct cl_optimization *callee_opt
5092 = TREE_OPTIMIZATION ((callee_tree)
5093 ? callee_tree
5094 : optimization_default_node);
5095
5096 if ((caller_opt->optimize > callee_opt->optimize)
5097 || (caller_opt->optimize_size != callee_opt->optimize_size))
5098 return false;
5099 }
5100 #endif
5101 tree caller, callee;
5102
5103 caller = e->caller->decl;
5104 callee = e->callee->decl;
5105
5106 /* We cannot inline a function that uses a different EH personality
5107 than the caller. */
5108 if (DECL_FUNCTION_PERSONALITY (caller)
5109 && DECL_FUNCTION_PERSONALITY (callee)
5110 && (DECL_FUNCTION_PERSONALITY (caller)
5111 != DECL_FUNCTION_PERSONALITY (callee)))
5112 {
5113 e->inline_failed = CIF_UNSPECIFIED;
5114 gimple_call_set_cannot_inline (e->call_stmt, true);
5115 return false;
5116 }
5117
5118 /* Allow the backend to decide if inlining is ok. */
5119 if (!targetm.target_option.can_inline_p (caller, callee))
5120 {
5121 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5122 gimple_call_set_cannot_inline (e->call_stmt, true);
5123 e->call_stmt_cannot_inline_p = true;
5124 return false;
5125 }
5126
5127 if (e->call_stmt
5128 && !gimple_check_call_args (e->call_stmt))
5129 {
5130 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5131 gimple_call_set_cannot_inline (e->call_stmt, true);
5132 e->call_stmt_cannot_inline_p = true;
5133 return false;
5134 }
5135
5136 return true;
5137 }