/* Tree inlining.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "cfghooks.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "gimple-predict.h"
29 #include "rtl.h"
30 #include "ssa.h"
31 #include "diagnostic-core.h"
32 #include "alias.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "calls.h"
36 #include "tree-inline.h"
37 #include "flags.h"
38 #include "params.h"
39 #include "insn-config.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "gimplify-me.h"
50 #include "gimple-walk.h"
51 #include "tree-cfg.h"
52 #include "tree-into-ssa.h"
53 #include "expmed.h"
54 #include "dojump.h"
55 #include "explow.h"
56 #include "emit-rtl.h"
57 #include "varasm.h"
58 #include "stmt.h"
59 #include "expr.h"
60 #include "tree-dfa.h"
61 #include "tree-ssa.h"
62 #include "tree-pretty-print.h"
63 #include "except.h"
64 #include "debug.h"
65 #include "cgraph.h"
66 #include "alloc-pool.h"
67 #include "symbol-summary.h"
68 #include "ipa-prop.h"
69 #include "value-prof.h"
70 #include "tree-pass.h"
71 #include "target.h"
72 #include "cfgloop.h"
73 #include "builtins.h"
74 #include "tree-chkp.h"
75
76
/* I'm not really happy about this, but we need to handle gimple and
   non-gimple trees.  */

/* Inlining, Cloning, Versioning, Parallelization

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX statements is adjusted accordingly.

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   Versioning: a function body is duplicated and the result is a new
   function, rather than being inlined into blocks of an existing
   function as with inlining.  Some parameters will become constants.

   Parallelization: a region of a function is duplicated resulting in
   a new function.  Variables may be replaced with complex expressions
   to enable shared variable semantics.

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_tree_body_r ().  */

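/* As an illustrative sketch of the inlining transform above (the names
   here are invented for illustration, not taken from a real dump):
   inlining a callee

       int callee (int p) { return p + 1; }

   duplicates its body into the caller with the PARM_DECL `p' remapped
   to a fresh local VAR_DECL and the RETURN_EXPR rewritten to store into
   a dedicated returned-value variable, roughly

       p.1 = arg;
       retval.2 = p.1 + 1;

   after which uses of the call's result read `retval.2'.  */
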
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */


/* Weights that estimate_num_insns uses to estimate the size of the
   produced code.  */

eni_weights eni_size_weights;

/* Weights that estimate_num_insns uses to estimate the time necessary
   to execute the produced code.  */

eni_weights eni_time_weights;

/* Prototypes.  */

static tree declare_return_variable (copy_body_data *, tree, tree, tree,
				     basic_block);
static void remap_block (tree *, copy_body_data *);
static void copy_bind_expr (tree *, int *, copy_body_data *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
static void insert_init_stmt (copy_body_data *, basic_block, gimple);

/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, it is used for more than that.  */

void
insert_decl_map (copy_body_data *id, tree key, tree value)
{
  id->decl_map->put (key, value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    id->decl_map->put (value, value);
}
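
/* Usage sketch (hypothetical): after remapping a parameter, a caller
   does roughly

       insert_decl_map (id, old_parm, new_var);

   which records both old_parm -> new_var and the identity pair
   new_var -> new_var, so that a later walk over the copied body that
   meets new_var again maps it to itself instead of copying it once
   more.  */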

/* Insert a tree->tree mapping for ID.  This is only used for
   variables.  */

static void
insert_debug_decl_map (copy_body_data *id, tree key, tree value)
{
  if (!gimple_in_ssa_p (id->src_cfun))
    return;

  if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return;

  if (!target_for_debug_bind (key))
    return;

  gcc_assert (TREE_CODE (key) == PARM_DECL);
  gcc_assert (TREE_CODE (value) == VAR_DECL);

  if (!id->debug_map)
    id->debug_map = new hash_map<tree, tree>;

  id->debug_map->put (key, value);
}

/* If nonzero, we're remapping the contents of inlined debug
   statements.  If negative, an error has occurred, such as a
   reference to a variable that isn't available in the inlined
   context.  */
static int processing_debug_stmt = 0;

/* Construct new SSA name for old NAME.  ID is the inline context.  */

static tree
remap_ssa_name (tree name, copy_body_data *id)
{
  tree new_tree, var;
  tree *n;

  gcc_assert (TREE_CODE (name) == SSA_NAME);

  n = id->decl_map->get (name);
  if (n)
    return unshare_expr (*n);

  if (processing_debug_stmt)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  && id->entry_bb == NULL
	  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
	{
	  tree vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp;
	  gimple_stmt_iterator gsi;
	  tree val = SSA_NAME_VAR (name);

	  n = id->decl_map->get (val);
	  if (n != NULL)
	    val = *n;
	  if (TREE_CODE (val) != PARM_DECL)
	    {
	      processing_debug_stmt = -1;
	      return name;
	    }
	  def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (name);
	  DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
	  gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	  return vexpr;
	}

      processing_debug_stmt = -1;
      return name;
    }

  /* Remap anonymous SSA names or SSA names of anonymous decls.  */
  var = SSA_NAME_VAR (name);
  if (!var
      || (!SSA_NAME_IS_DEFAULT_DEF (name)
	  && TREE_CODE (var) == VAR_DECL
	  && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
	  && DECL_ARTIFICIAL (var)
	  && DECL_IGNORED_P (var)
	  && !DECL_NAME (var)))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
      if (!var && SSA_NAME_IDENTIFIER (name))
	SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      return new_tree;
    }

  /* Do not set DEF_STMT yet as the statement is not copied yet.  We do
     that in copy_bb.  */
  new_tree = remap_decl (var, id);

  /* We might have substituted a constant or another SSA_NAME for
     the variable.

     Replace the SSA name representing RESULT_DECL by the variable during
     inlining: this saves us from the need to introduce a PHI node in the
     case where the return value is only partly initialized.  */
  if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
      && (!SSA_NAME_VAR (name)
	  || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
	  || !id->transform_return_to_modify))
    {
      struct ptr_info_def *pi;
      new_tree = make_ssa_name (new_tree);
      insert_decl_map (id, name, new_tree);
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
      /* At least IPA points-to info can be directly transferred.  */
      if (id->src_cfun->gimple_df
	  && id->src_cfun->gimple_df->ipa_pta
	  && (pi = SSA_NAME_PTR_INFO (name))
	  && !pi->pt.anything)
	{
	  struct ptr_info_def *new_pi = get_ptr_info (new_tree);
	  new_pi->pt = pi->pt;
	}
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	{
	  /* By inlining a function with an uninitialized variable, we might
	     extend its lifetime (the variable might get reused).  This causes
	     an ICE in the case we end up extending the lifetime of an SSA
	     name across an abnormal edge, and it also increases register
	     pressure.

	     We simply initialize all uninitialized vars by 0 except for the
	     case where we are inlining into the very first BB.  We can avoid
	     this for all BBs that are not inside strongly connected regions
	     of the CFG, but this is expensive to test.  */
	  if (id->entry_bb
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
	      && (!SSA_NAME_VAR (name)
		  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
	      && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
					     0)->dest
		  || EDGE_COUNT (id->entry_bb->preds) != 1))
	    {
	      gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
	      gimple init_stmt;
	      tree zero = build_zero_cst (TREE_TYPE (new_tree));

	      init_stmt = gimple_build_assign (new_tree, zero);
	      gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
	      SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
	    }
	  else
	    {
	      SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
	      set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
	    }
	}
    }
  else
    insert_decl_map (id, name, new_tree);
  return new_tree;
}
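
/* A sketch of the default-def handling above (SSA names invented for
   illustration): if the callee reads an uninitialized variable, i.e.
   its SSA name is a default definition such as

       use (u_1(D));

   then, when inlining somewhere other than the very first BB, the copy
   gets an explicit initializer appended to the entry block,

       u.3_7 = 0;
       use (u.3_7);

   so the copied name is no longer a default def and cannot have its
   lifetime extended across an abnormal edge.  */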

/* Remap DECL during the copying of the BLOCK tree for the function.  */

tree
remap_decl (tree decl, copy_body_data *id)
{
  tree *n;

  /* We only remap local variables in the current function.  */

  /* See if we have remapped this declaration.  */

  n = id->decl_map->get (decl);

  if (!n && processing_debug_stmt)
    {
      processing_debug_stmt = -1;
      return decl;
    }

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t = id->copy_decl (decl, id);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      if (!DECL_P (t))
	return t;

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
	}

      return t;
    }

  if (id->do_not_unshare)
    return *n;
  else
    return unshare_expr (*n);
}
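
/* Usage sketch (hypothetical): the first remap_decl (local_var, id)
   call copies the decl through the id->copy_decl hook, caches the pair
   in id->decl_map, and remaps the copy's type and size trees; every
   later call for the same decl just returns the cached copy, unshared
   unless the caller set id->do_not_unshare.  */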

static tree
remap_type_1 (tree type, copy_body_data *id)
{
  tree new_tree, t;

  /* We do need a copy.  Build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					      TYPE_MODE (type),
					      TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
						TYPE_MODE (type),
						TYPE_REF_CAN_ALIAS_ALL (type));
      if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
	new_tree = build_type_attribute_qual_variant (new_tree,
						      TYPE_ATTRIBUTES (type),
						      TYPE_QUALS (type));
      insert_decl_map (id, type, new_tree);
      return new_tree;
    }
  else
    new_tree = copy_node (type);

  insert_decl_map (id, type, new_tree);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new_tree) = t;
      TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_tree;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_tree) = new_tree;
      TYPE_NEXT_VARIANT (new_tree) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new_tree) = NULL;
  TYPE_REFERENCE_TO (new_tree) = NULL;

  /* Copy all types that may contain references to local variables; be sure to
     preserve sharing in between type and its main variant when possible.  */
  switch (TREE_CODE (new_tree))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_MIN_VALUE (type)
			       == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
	  gcc_checking_assert (TYPE_MAX_VALUE (type)
			       == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));

	  TYPE_MIN_VALUE (new_tree)
	    = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
	  TYPE_MAX_VALUE (new_tree)
	    = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	{
	  t = TYPE_MIN_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);

	  t = TYPE_MAX_VALUE (new_tree);
	  if (t && TREE_CODE (t) != INTEGER_CST)
	    walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
	}
      return new_tree;

    case FUNCTION_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
	TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
      else
	walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
      return new_tree;

    case ARRAY_TYPE:
      if (TYPE_MAIN_VARIANT (new_tree) != new_tree
	  && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
	TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
      else
	TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);

      if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
	{
	  gcc_checking_assert (TYPE_DOMAIN (type)
			       == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
	  TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
	}
      else
	TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      if (TYPE_MAIN_VARIANT (type) != type
	  && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
	TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
      else
	{
	  tree f, nf = NULL;

	  for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
	    {
	      t = remap_decl (f, id);
	      DECL_CONTEXT (t) = new_tree;
	      DECL_CHAIN (t) = nf;
	      nf = t;
	    }
	  TYPE_FIELDS (new_tree) = nreverse (nf);
	}
      break;

    case OFFSET_TYPE:
    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  /* All variants of type share the same size, so use the already remapped
     data.  */
  if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
    {
      gcc_checking_assert (TYPE_SIZE (type)
			   == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
      gcc_checking_assert (TYPE_SIZE_UNIT (type)
			   == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));

      TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
      TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
    }
  else
    {
      walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
      walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
    }

  return new_tree;
}

tree
remap_type (tree type, copy_body_data *id)
{
  tree *node;
  tree tmp;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = id->decl_map->get (type);
  if (node)
    return *node;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->src_fn))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  id->remapping_type_depth++;
  tmp = remap_type_1 (type, id);
  id->remapping_type_depth--;

  return tmp;
}
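
/* Illustrative sketch (invented names): for a callee-local VLA type
   such as `int[0:D.2345]' whose bound D.2345 lives in the callee, the
   type is variably modified, so remap_type_1 builds a fresh ARRAY_TYPE
   whose TYPE_DOMAIN refers to the remapped copy of D.2345.  A plain
   `int' is not variably modified and is simply mapped to itself.  */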

/* Decide if DECL can be put into BLOCK_NONLOCAL_VARs.  */

static bool
can_be_nonlocal (tree decl, copy_body_data *id)
{
  /* We cannot duplicate function decls.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return true;

  /* Local static vars must be non-local or we get multiple declaration
     problems.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !auto_var_in_fn_p (decl, id->src_fn))
    return true;

  return false;
}
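
/* For example, a function-local `static int counter;' in the callee is
   not an auto variable of the callee, so it is treated as non-local:
   the single original decl is reused by every inlined copy instead of
   being duplicated, which would otherwise create multiple definitions
   of the same static.  */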

static tree
remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
	     copy_body_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
    {
      tree new_var;

      if (can_be_nonlocal (old_var, id))
	{
	  /* We need to add this variable to the local decls as otherwise
	     nothing else will do so.  */
	  if (TREE_CODE (old_var) == VAR_DECL
	      && ! DECL_EXTERNAL (old_var))
	    add_local_decl (cfun, old_var);
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	  continue;
	}

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */

      if (new_var == id->retvar)
	;
      else if (!new_var)
	{
	  if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
	      && !DECL_IGNORED_P (old_var)
	      && nonlocalized_list)
	    vec_safe_push (*nonlocalized_list, old_var);
	}
      else
	{
	  gcc_assert (DECL_P (new_var));
	  DECL_CHAIN (new_var) = new_decls;
	  new_decls = new_var;

	  /* Also copy value-expressions.  */
	  if (TREE_CODE (new_var) == VAR_DECL
	      && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree tem = DECL_VALUE_EXPR (new_var);
	      bool old_regimplify = id->regimplify;
	      id->remapping_type_depth++;
	      walk_tree (&tem, copy_tree_body_r, id, NULL);
	      id->remapping_type_depth--;
	      id->regimplify = old_regimplify;
	      SET_DECL_VALUE_EXPR (new_var, tem);
	    }
	}
    }

  return nreverse (new_decls);
}

/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, copy_body_data *id)
{
  tree old_block;
  tree new_block;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
  BLOCK_NONLOCALIZED_VARS (new_block)
    = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
					&BLOCK_NONLOCALIZED_VARS (new_block),
					id);

  if (id->transform_lang_insert_block)
    id->transform_lang_insert_block (new_block);

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}

/* Copy the whole block tree and root it in id->block.  */
static tree
remap_blocks (tree block, copy_body_data *id)
{
  tree t;
  tree new_tree = block;

  if (!block)
    return NULL;

  remap_block (&new_tree, id);
  gcc_assert (new_tree != block);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    prepend_lexical_block (new_tree, remap_blocks (t, id));
  /* Blocks are in arbitrary order, but make things slightly prettier and do
     not swap order when producing a copy.  */
  BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
  return new_tree;
}
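
/* Sketch of the traversal (hypothetical shapes): for a block tree B0
   with subblocks B1 and B2, remap_blocks copies B0, then recurses into
   B1 and B2 in order, prepending each copy onto the new B0's subblock
   chain (yielding B2, B1); the final blocks_nreverse restores the
   original B1, B2 order in the copy.  */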

/* Remap the block tree rooted at BLOCK to nothing.  */
static void
remap_blocks_to_null (tree block, copy_body_data *id)
{
  tree t;
  insert_decl_map (id, block, NULL_TREE);
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    remap_blocks_to_null (t, id);
}

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new_tree;

  new_tree = alloc_stmt_list ();
  ni = tsi_start (new_tree);
  oi = tsi_start (*tp);
  TREE_TYPE (new_tree) = TREE_TYPE (*tp);
  *tp = new_tree;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    {
      tree stmt = tsi_stmt (oi);
      if (TREE_CODE (stmt) == STATEMENT_LIST)
	/* This copy is not redundant; tsi_link_after will smash this
	   STATEMENT_LIST into the end of the one we're building, and we
	   don't want to do that with the original.  */
	copy_statement_list (&stmt);
      tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
    }
}

static void
copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
}


/* Create a new gimple_seq by remapping all the statements in BODY
   using the inlining information in ID.  */

static gimple_seq
remap_gimple_seq (gimple_seq body, copy_body_data *id)
{
  gimple_stmt_iterator si;
  gimple_seq new_body = NULL;

  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
    {
      gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
      gimple_seq_add_seq (&new_body, new_stmts);
    }

  return new_body;
}


/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
   block using the mapping information in ID.  */

static gimple
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
  gimple new_bind;
  tree new_block, new_vars;
  gimple_seq body, new_body;

  /* Copy the statement.  Note that we purposely don't use copy_stmt
     here because we need to remap statements as we copy.  */
  body = gimple_bind_body (stmt);
  new_body = remap_gimple_seq (body, id);

  new_block = gimple_bind_block (stmt);
  if (new_block)
    remap_block (&new_block, id);

  /* This will remap a lot of the same decls again, but this should be
     harmless.  */
  new_vars = gimple_bind_vars (stmt);
  if (new_vars)
    new_vars = remap_decls (new_vars, NULL, id);

  new_bind = gimple_build_bind (new_vars, new_body, new_block);

  return new_bind;
}

/* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */

static bool
is_parm (tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      decl = SSA_NAME_VAR (decl);
      if (!decl)
	return false;
    }

  return (TREE_CODE (decl) == PARM_DECL);
}

/* Remap the dependence CLIQUE from the source to the destination function
   as specified in ID.  */

static unsigned short
remap_dependence_clique (copy_body_data *id, unsigned short clique)
{
  if (clique == 0)
    return 0;
  if (!id->dependence_map)
    id->dependence_map = new hash_map<dependence_hash, unsigned short>;
  bool existed;
  unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
  if (!existed)
    newc = ++cfun->last_clique;
  return newc;
}
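
/* For instance (numbers invented): if two MEM_REFs in the callee share
   dependence clique 3, the first one remapped allocates a fresh clique
   in the destination function, say ++cfun->last_clique == 7, and the
   cached mapping 3 -> 7 guarantees the second MEM_REF lands in the
   same clique, preserving the "these accesses don't alias other
   cliques" property across the copy.  */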

/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
   WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
   recursing into the children nodes of *TP.  */

static tree
remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  copy_body_data *id = (copy_body_data *) wi_p->info;
  tree fn = id->src_fn;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }
  else if (auto_var_in_fn_p (*tp, fn))
    {
      /* Local variables and labels need to be replaced by equivalent
	 variables.  We don't want to copy static variables; there's
	 only one of those, no matter how many times we inline the
	 containing function.  Similarly for globals from an outer
	 function.  */
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      /* ???  The C++ frontend uses void * pointer zero to initialize
	 any other type.  This confuses the middle-end type verification.
	 As cloned bodies do not go through gimplification again the fixup
	 there doesn't trigger.  */
      if (TREE_CODE (new_decl) == INTEGER_CST
	  && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
	new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    gcc_unreachable ();
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (!DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == FIELD_DECL)
    {
      /* If the enclosing record type is variably_modified_type_p, the field
	 has already been remapped.  Otherwise, it need not be.  */
      tree *n = id->decl_map->get (*tp);
      if (n)
	*tp = *n;
      *walk_subtrees = 0;
    }
  else if (TYPE_P (*tp))
    /* Types may need remapping as well.  */
    *tp = remap_type (*tp, id);
  else if (CONSTANT_CLASS_P (*tp))
    {
      /* If this is a constant, we have to copy the node iff the type
	 will be remapped.  copy_tree_r will not copy a constant.  */
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, *tp);
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }
  else
    {
      /* Otherwise, just copy the node.  Note that copy_tree_r already
	 knows not to copy VAR_DECLs, etc., so this is safe.  */

      if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, remap_gimple_op_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  /* The copied TARGET_EXPR has never been expanded, even if the
	     original node was expanded already.  */
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  /* Variable substitution need not be simple.  In particular,
	     the MEM_REF substitution above.  Make sure that
	     TREE_CONSTANT and friends are up-to-date.  */
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
	  recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Update the TREE_BLOCK for the cloned expr.  */
  if (EXPR_P (*tp))
    {
      tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
      tree old_block = TREE_BLOCK (*tp);
      if (old_block)
	{
	  tree *n;
	  n = id->decl_map->get (TREE_BLOCK (*tp));
	  if (n)
	    new_block = *n;
	}
      TREE_SET_BLOCK (*tp, new_block);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
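
/* Sketch of the MEM_REF re-canonicalization above (hypothetical names):
   if the caller passes `&a' for pointer parameter p, then after
   substitution the callee's load

       ... = MEM[(int *) p_1];

   becomes MEM[(int *) &a], which fold_build2 re-canonicalizes into a
   direct access to `a'; the volatility, side-effect and no-warning
   flags are carried over from the original reference by hand.  */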


/* Called from copy_body_id via walk_tree.  DATA is really a
   `copy_body_data *'.  */

tree
copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
{
  copy_body_data *id = (copy_body_data *) data;
  tree fn = id->src_fn;
  tree new_block;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, RETURN_EXPRs should be transformed to just the
     contained MODIFY_EXPR.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than a statement.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *)1;
	}
    }
  else if (TREE_CODE (*tp) == SSA_NAME)
    {
      *tp = remap_ssa_name (*tp, id);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR
	   || TREE_CODE (*tp) == TARGET_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL
	   && (! DECL_CONTEXT (*tp)
	       || decl_function_context (*tp) == id->src_fn))
    /* These may need to be remapped for EH handling.  */
    *tp = remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = wide_int_to_tree (new_type, *tp);
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		{
		  *tp = build_empty_stmt (EXPR_LOCATION (*tp));
		  return copy_tree_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  tree *n = id->decl_map->get (decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  As fold_indirect_ref
		 does other useful transformations, try that first, though.  */
	      tree type = TREE_TYPE (*tp);
	      tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
	      tree old = *tp;
	      *tp = gimple_fold_indirect_ref (ptr);
	      if (! *tp)
		{
		  if (TREE_CODE (ptr) == ADDR_EXPR)
		    {
		      *tp
			= fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
		      /* ???  We should either assert here or build
			 a VIEW_CONVERT_EXPR instead of blindly leaking
			 incompatible types to our IL.  */
		      if (! *tp)
			*tp = TREE_OPERAND (ptr, 0);
		    }
		  else
		    {
		      *tp = build1 (INDIRECT_REF, type, ptr);
		      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
		      TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
		      TREE_READONLY (*tp) = TREE_READONLY (old);
		      /* We cannot propagate the TREE_THIS_NOTRAP flag if we
			 have remapped a parameter as the property might be
			 valid only for the parameter itself.  */
		      if (TREE_THIS_NOTRAP (old)
			  && (!is_parm (TREE_OPERAND (old, 0))
			      || (!id->transform_parameter && is_parm (ptr))))
			TREE_THIS_NOTRAP (*tp) = 1;
		    }
		}
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
      else if (TREE_CODE (*tp) == MEM_REF)
	{
	  /* We need to re-canonicalize MEM_REFs from inline substitutions
	     that can happen when a pointer argument is an ADDR_EXPR.
	     Recurse here manually to allow that.  */
	  tree ptr = TREE_OPERAND (*tp, 0);
	  tree type = remap_type (TREE_TYPE (*tp), id);
	  tree old = *tp;
	  walk_tree (&ptr, copy_tree_body_r, data, NULL);
	  *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
	  TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
	  TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
	  TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
	  if (MR_DEPENDENCE_CLIQUE (old) != 0)
	    {
	      MR_DEPENDENCE_CLIQUE (*tp)
		= remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
	      MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
	    }
	  /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
	     remapped a parameter as the property might be valid only
	     for the parameter itself.  */
	  if (TREE_THIS_NOTRAP (old)
	      && (!is_parm (TREE_OPERAND (old, 0))
		  || (!id->transform_parameter && is_parm (ptr))))
	    TREE_THIS_NOTRAP (*tp) = 1;
	  *walk_subtrees = 0;
	  return NULL;
	}

      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);

      /* If EXPR has a block defined, map it to the newly constructed block.
	 When inlining we want EXPRs without a block to appear in the block
	 of the function call if we are not remapping a type.  */
      if (EXPR_P (*tp))
	{
	  new_block = id->remapping_type_depth == 0 ? id->block : NULL;
	  if (TREE_BLOCK (*tp))
	    {
	      tree *n;
	      n = id->decl_map->get (TREE_BLOCK (*tp));
	      if (n)
		new_block = *n;
	    }
	  TREE_SET_BLOCK (*tp, new_block);
	}

      if (TREE_CODE (*tp) != OMP_CLAUSE)
	TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple.  In particular, the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  int invariant = is_gimple_min_invariant (*tp);
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);

	  /* Handle the case where we substituted an INDIRECT_REF
	     into the operand of the ADDR_EXPR.  */
	  if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
	    *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
	  else
	    recompute_tree_invariant_for_addr_expr (*tp);

	  /* If this used to be invariant, but is not any longer,
	     then regimplification is probably needed.  */
	  if (invariant && !is_gimple_min_invariant (*tp))
	    id->regimplify = true;

	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
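
/* Sketch of the *& elimination above (names invented): when the caller
   passes `&x' for pointer parameter p, the callee expression *p first
   becomes *&x after substitution; gimple_fold_indirect_ref (or the
   ADDR_EXPR stripping fallback) then reduces it to plain `x', so no
   dereference survives in the inlined body.  */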

/* Helper for remap_gimple_stmt.  Given an EH region number for the
   source function, map that to the duplicate EH region number in
   the destination function.  */

static int
remap_eh_region_nr (int old_nr, copy_body_data *id)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
  new_r = static_cast<eh_region> (*id->eh_map->get (old_r));

  return new_r->index;
}

/* Similar, but operate on INTEGER_CSTs.  */

static tree
remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = remap_eh_region_nr (old_nr, id);

  return build_int_cst (integer_type_node, new_nr);
}
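
/* For example (region numbers invented): a copied call to
   __builtin_eh_pointer (2) in the source function must refer to the
   duplicated region in the destination, so if id->eh_map sends source
   region 2 to destination region 5, the copy reads
   __builtin_eh_pointer (5).  The tree variant just wraps the integer
   remapping in an INTEGER_CST.  */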

/* Helper for copy_bb.  Remap statement STMT using the inlining
   information in ID.  Return the new statement copy.  */

static gimple_seq
remap_gimple_stmt (gimple stmt, copy_body_data *id)
{
  gimple copy = NULL;
  struct walk_stmt_info wi;
  bool skip_first = false;
  gimple_seq stmts = NULL;

  if (is_gimple_debug (stmt)
      && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
    return stmts;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* When requested, GIMPLE_RETURNs should be transformed to just the
     contained GIMPLE_ASSIGN.  The branch semantics of the return will
     be handled elsewhere by manipulating the CFG rather than the
     statement.  */
  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
    {
      tree retval = gimple_return_retval (as_a <greturn *> (stmt));
      tree retbnd = gimple_return_retbnd (stmt);
      tree bndslot = id->retbnd;

      if (retbnd && bndslot)
	{
	  gimple bndcopy = gimple_build_assign (bndslot, retbnd);
	  memset (&wi, 0, sizeof (wi));
	  wi.info = id;
	  walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
	  gimple_seq_add_stmt (&stmts, bndcopy);
	}

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If RETVAL is just the result decl, the result decl has
	 already been set (e.g. a recent "foo (&result_decl, ...)");
	 just toss the entire GIMPLE_RETURN.  */
      if (retval
	  && (TREE_CODE (retval) != RESULT_DECL
	      && (TREE_CODE (retval) != SSA_NAME
		  || ! SSA_NAME_VAR (retval)
		  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
	{
	  copy = gimple_build_assign (id->do_not_unshare
				      ? id->retvar : unshare_expr (id->retvar),
				      retval);
	  /* id->retvar is already substituted.  Skip it on later remapping.  */
	  skip_first = true;

	  /* We need to copy bounds if we return a structure with pointers
	     into an instrumented function.  */
	  if (chkp_function_instrumented_p (id->dst_fn)
	      && !bndslot
	      && !BOUNDED_P (id->retvar)
	      && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
	    id->assign_stmts.safe_push (copy);

	}
      else
	return stmts;
    }
  else if (gimple_has_substatements (stmt))
    {
      gimple_seq s1, s2;

      /* When cloning bodies from the C++ front end, we will be handed bodies
	 in High GIMPLE form.  Handle here all the High GIMPLE statements that
	 have embedded statements.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
	    copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
	  copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
	  break;

	case GIMPLE_TRY:
	  s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
	  s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
	  copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
	  copy = gimple_build_wce (s1);
	  break;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
	    copy = gimple_build_omp_parallel
		     (s1,
		      gimple_omp_parallel_clauses (omp_par_stmt),
		      gimple_omp_parallel_child_fn (omp_par_stmt),
		      gimple_omp_parallel_data_arg (omp_par_stmt));
	  }
	  break;

	case GIMPLE_OMP_TASK:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_task
		   (s1,
		    gimple_omp_task_clauses (stmt),
		    gimple_omp_task_child_fn (stmt),
		    gimple_omp_task_data_arg (stmt),
		    gimple_omp_task_copy_fn (stmt),
		    gimple_omp_task_arg_size (stmt),
		    gimple_omp_task_arg_align (stmt));
	  break;

	case GIMPLE_OMP_FOR:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
	  copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
				       gimple_omp_for_clauses (stmt),
				       gimple_omp_for_collapse (stmt), s2);
	  {
	    size_t i;
	    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	      {
		gimple_omp_for_set_index (copy, i,
					  gimple_omp_for_index (stmt, i));
		gimple_omp_for_set_initial (copy, i,
					    gimple_omp_for_initial (stmt, i));
		gimple_omp_for_set_final (copy, i,
					  gimple_omp_for_final (stmt, i));
		gimple_omp_for_set_incr (copy, i,
					 gimple_omp_for_incr (stmt, i));
		gimple_omp_for_set_cond (copy, i,
					 gimple_omp_for_cond (stmt, i));
	      }
	  }
	  break;

	case GIMPLE_OMP_MASTER:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_master (s1);
	  break;

	case GIMPLE_OMP_TASKGROUP:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_taskgroup (s1);
	  break;

	case GIMPLE_OMP_ORDERED:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_ordered (s1);
	  break;

	case GIMPLE_OMP_SECTION:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_section (s1);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_sections
		   (s1, gimple_omp_sections_clauses (stmt));
	  break;

	case GIMPLE_OMP_SINGLE:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_single
		   (s1, gimple_omp_single_clauses (stmt));
	  break;

	case GIMPLE_OMP_TARGET:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_target
		   (s1, gimple_omp_target_kind (stmt),
		    gimple_omp_target_clauses (stmt));
	  break;

	case GIMPLE_OMP_TEAMS:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_teams
		   (s1, gimple_omp_teams_clauses (stmt));
	  break;

	case GIMPLE_OMP_CRITICAL:
	  s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
	  copy = gimple_build_omp_critical (s1,
					    gimple_omp_critical_name (
					      as_a <gomp_critical *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
	    gtransaction *new_trans_stmt;
	    s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
				   id);
	    copy = new_trans_stmt
	      = gimple_build_transaction (
		  s1,
		  gimple_transaction_label (old_trans_stmt));
	    gimple_transaction_set_subcode (
	      new_trans_stmt,
	      gimple_transaction_subcode (old_trans_stmt));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (gimple_assign_copy_p (stmt)
	  && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
	  && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
	{
	  /* Here we handle statements that are not completely rewritten.
	     First we detect some inlining-induced bogosities for
	     discarding.  */

	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = gimple_assign_lhs (stmt), value;
	  tree *n;

	  n = id->decl_map->get (decl);
	  if (n)
	    {
	      value = *n;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY (value))
		return NULL;
	    }
	}

      /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
	 in a block that we aren't copying during tree_function_versioning,
	 just drop the clobber stmt.  */
      if (id->blocks_to_copy && gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
	    {
	      gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
	      if (gimple_bb (def_stmt)
		  && !bitmap_bit_p (id->blocks_to_copy,
				    gimple_bb (def_stmt)->index))
		return NULL;
	    }
	}

      if (gimple_debug_bind_p (stmt))
	{
	  gdebug *copy
	    = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
				       gimple_debug_bind_get_value (stmt),
				       stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}
      if (gimple_debug_source_bind_p (stmt))
	{
	  gdebug *copy = gimple_build_debug_source_bind
			   (gimple_debug_source_bind_get_var (stmt),
			    gimple_debug_source_bind_get_value (stmt),
			    stmt);
	  id->debug_stmts.safe_push (copy);
	  gimple_seq_add_stmt (&stmts, copy);
	  return stmts;
	}

      /* Create a new deep copy of the statement.  */
      copy = gimple_copy (stmt);

      /* Clear flags that need revisiting.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (copy))
	{
	  if (gimple_call_tail_p (call_stmt))
	    gimple_call_set_tail (call_stmt, false);
	  if (gimple_call_from_thunk_p (call_stmt))
	    gimple_call_set_from_thunk (call_stmt, false);
	}

      /* Remap the region numbers for __builtin_eh_{pointer,filter},
	 RESX and EH_DISPATCH.  */
      if (id->eh_map)
	switch (gimple_code (copy))
	  {
	  case GIMPLE_CALL:
	    {
	      tree r, fndecl = gimple_call_fndecl (copy);
	      if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
		switch (DECL_FUNCTION_CODE (fndecl))
		  {
		  case BUILT_IN_EH_COPY_VALUES:
		    r = gimple_call_arg (copy, 1);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 1, r);
		    /* FALLTHRU */

		  case BUILT_IN_EH_POINTER:
		  case BUILT_IN_EH_FILTER:
		    r = gimple_call_arg (copy, 0);
		    r = remap_eh_region_tree_nr (r, id);
		    gimple_call_set_arg (copy, 0, r);
		    break;

		  default:
		    break;
		  }

	      /* Reset alias info if we didn't apply measures to
		 keep it valid over inlining by setting DECL_PT_UID.  */
	      if (!id->src_cfun->gimple_df
		  || !id->src_cfun->gimple_df->ipa_pta)
		gimple_call_reset_alias_info (as_a <gcall *> (copy));
	    }
	    break;

	  case GIMPLE_RESX:
	    {
	      gresx *resx_stmt = as_a <gresx *> (copy);
	      int r = gimple_resx_region (resx_stmt);
	      r = remap_eh_region_nr (r, id);
	      gimple_resx_set_region (resx_stmt, r);
	    }
	    break;

	  case GIMPLE_EH_DISPATCH:
	    {
	      geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
	      int r = gimple_eh_dispatch_region (eh_dispatch);
	      r = remap_eh_region_nr (r, id);
	      gimple_eh_dispatch_set_region (eh_dispatch, r);
	    }
	    break;

	  default:
	    break;
	  }
    }

  /* If STMT has a block defined, map it to the newly constructed
     block.  */
  if (gimple_block (copy))
    {
      tree *n;
      n = id->decl_map->get (gimple_block (copy));
      gcc_assert (n);
      gimple_set_block (copy, *n);
    }

  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
    {
      gimple_seq_add_stmt (&stmts, copy);
      return stmts;
    }

  /* Remap all the operands in COPY.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = id;
  if (skip_first)
    walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
  else
    walk_gimple_op (copy, remap_gimple_op_r, &wi);

  /* Clear the copied virtual operands.  We are not remapping them here
     but are going to recreate them from scratch.  */
  if (gimple_has_mem_ops (copy))
    {
      gimple_set_vdef (copy, NULL_TREE);
      gimple_set_vuse (copy, NULL_TREE);
    }

  gimple_seq_add_stmt (&stmts, copy);
  return stmts;
}
1714
1715
1716 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1717 later */
1718
1719 static basic_block
1720 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1721 gcov_type count_scale)
1722 {
1723 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1724 basic_block copy_basic_block;
1725 tree decl;
1726 gcov_type freq;
1727 basic_block prev;
1728
1729 /* Search for previous copied basic block. */
1730 prev = bb->prev_bb;
1731 while (!prev->aux)
1732 prev = prev->prev_bb;
1733
1734 /* create_basic_block() will append every new block to
1735 basic_block_info automatically. */
1736 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1737 copy_basic_block->count = apply_scale (bb->count, count_scale);
1738
1739 /* We are going to rebuild frequencies from scratch. These values
1740 have just small importance to drive canonicalize_loop_headers. */
1741 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1742
1743 /* We recompute frequencies after inlining, so this is quite safe. */
1744 if (freq > BB_FREQ_MAX)
1745 freq = BB_FREQ_MAX;
1746 copy_basic_block->frequency = freq;
1747
1748 copy_gsi = gsi_start_bb (copy_basic_block);
1749
1750 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1751 {
1752 gimple_seq stmts;
1753 gimple stmt = gsi_stmt (gsi);
1754 gimple orig_stmt = stmt;
1755 gimple_stmt_iterator stmts_gsi;
1756 bool stmt_added = false;
1757
1758 id->regimplify = false;
1759 stmts = remap_gimple_stmt (stmt, id);
1760
1761 if (gimple_seq_empty_p (stmts))
1762 continue;
1763
1764 seq_gsi = copy_gsi;
1765
1766 for (stmts_gsi = gsi_start (stmts);
1767 !gsi_end_p (stmts_gsi); )
1768 {
1769 stmt = gsi_stmt (stmts_gsi);
1770
1771 /* Advance iterator now before stmt is moved to seq_gsi. */
1772 gsi_next (&stmts_gsi);
1773
1774 if (gimple_nop_p (stmt))
1775 continue;
1776
1777 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1778 orig_stmt);
1779
1780 /* With return slot optimization we can end up with
1781 non-gimple (foo *)&this->m, fix that here. */
1782 if (is_gimple_assign (stmt)
1783 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1784 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1785 {
1786 tree new_rhs;
1787 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1788 gimple_assign_rhs1 (stmt),
1789 true, NULL, false,
1790 GSI_CONTINUE_LINKING);
1791 gimple_assign_set_rhs1 (stmt, new_rhs);
1792 id->regimplify = false;
1793 }
1794
1795 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1796
1797 if (id->regimplify)
1798 gimple_regimplify_operands (stmt, &seq_gsi);
1799
1800 stmt_added = true;
1801 }
1802
1803 if (!stmt_added)
1804 continue;
1805
1806 /* If copy_basic_block was empty at the start of this iteration,
1807 call gsi_start_bb again to get at the newly added statements. */
1808 if (gsi_end_p (copy_gsi))
1809 copy_gsi = gsi_start_bb (copy_basic_block);
1810 else
1811 gsi_next (&copy_gsi);
1812
1813 /* Process the new statement. The call to gimple_regimplify_operands
1814 possibly turned the statement into multiple statements; we
1815 need to process all of them. */
1816 do
1817 {
1818 tree fn;
1819 gcall *call_stmt;
1820
1821 stmt = gsi_stmt (copy_gsi);
1822 call_stmt = dyn_cast <gcall *> (stmt);
1823 if (call_stmt
1824 && gimple_call_va_arg_pack_p (call_stmt)
1825 && id->call_stmt)
1826 {
1827 /* __builtin_va_arg_pack () should be replaced by
1828 all arguments corresponding to ... in the caller. */
1829 tree p;
1830 gcall *new_call;
1831 vec<tree> argarray;
1832 size_t nargs = gimple_call_num_args (id->call_stmt);
1833 size_t n, i, nargs_to_copy;
1834 bool remove_bounds = false;
1835
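/* NARGS starts as the number of arguments in the caller's call; each
   named parameter of the callee cancels one, leaving the count of
   arguments that were passed through '...'. */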
1836 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1837 nargs--;
1838
1839 /* Bounds should be removed from the arg pack in case
1840 we handle a non-instrumented call in an instrumented
1841 function. */
1842 nargs_to_copy = nargs;
1843 if (gimple_call_with_bounds_p (id->call_stmt)
1844 && !gimple_call_with_bounds_p (stmt))
1845 {
1846 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1847 i < gimple_call_num_args (id->call_stmt);
1848 i++)
1849 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1850 nargs_to_copy--;
1851 remove_bounds = true;
1852 }
1853
1854 /* Create the new array of arguments. */
1855 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1856 argarray.create (n);
1857 argarray.safe_grow_cleared (n);
1858
1859 /* Copy all the arguments before '...' */
1860 memcpy (argarray.address (),
1861 gimple_call_arg_ptr (call_stmt, 0),
1862 gimple_call_num_args (call_stmt) * sizeof (tree));
1863
1864 if (remove_bounds)
1865 {
1866 /* Append the rest of arguments removing bounds. */
1867 unsigned cur = gimple_call_num_args (call_stmt);
1869 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1870 i < gimple_call_num_args (id->call_stmt);
1871 i++)
1872 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1873 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1874 gcc_assert (cur == n);
1875 }
1876 else
1877 {
1878 /* Append the arguments passed in '...' */
1879 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1880 gimple_call_arg_ptr (id->call_stmt, 0)
1881 + (gimple_call_num_args (id->call_stmt) - nargs),
1882 nargs * sizeof (tree));
1883 }
1884
1885 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1886 argarray);
1887
1888 argarray.release ();
1889
1890 /* Copy all GIMPLE_CALL flags, location and block, except
1891 GF_CALL_VA_ARG_PACK. */
1892 gimple_call_copy_flags (new_call, call_stmt);
1893 gimple_call_set_va_arg_pack (new_call, false);
1894 gimple_set_location (new_call, gimple_location (stmt));
1895 gimple_set_block (new_call, gimple_block (stmt));
1896 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1897
1898 gsi_replace (&copy_gsi, new_call, false);
1899 stmt = new_call;
1900 }
1901 else if (call_stmt
1902 && id->call_stmt
1903 && (decl = gimple_call_fndecl (stmt))
1904 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1905 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1906 {
1907 /* __builtin_va_arg_pack_len () should be replaced by
1908 the number of anonymous arguments. */
1909 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1910 tree count, p;
1911 gimple new_stmt;
1912
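/* As above, subtract the named parameters to get the number of
   arguments passed through '...'. */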
1913 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1914 nargs--;
1915
1916 /* For instrumented calls we should ignore bounds. */
1917 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1918 i < gimple_call_num_args (id->call_stmt);
1919 i++)
1920 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1921 nargs--;
1922
1923 count = build_int_cst (integer_type_node, nargs);
1924 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1925 gsi_replace (&copy_gsi, new_stmt, false);
1926 stmt = new_stmt;
1927 }
1928 else if (call_stmt
1929 && id->call_stmt
1930 && gimple_call_internal_p (stmt)
1931 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1932 {
1933 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1934 gsi_remove (&copy_gsi, false);
1935 continue;
1936 }
1937
1938 /* Statements produced by inlining can be unfolded, especially
1939 when we constant propagated some operands. We can't fold
1940 them right now for two reasons:
1941 1) folding requires SSA_NAME_DEF_STMTs to be correct
1942 2) we can't change function calls to builtins.
1943 So we just mark the statement for later folding. We mark
1944 all new statements, instead of just the statements changed
1945 by some nontrivial substitution, so even statements made
1946 foldable indirectly are updated. If this turns out to be
1947 expensive, copy_body can be told to watch for nontrivial
1948 changes. */
1949 if (id->statements_to_fold)
1950 id->statements_to_fold->add (stmt);
1951
1952 /* We're duplicating a CALL_EXPR. Find any corresponding
1953 callgraph edges and update or duplicate them. */
1954 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1955 {
1956 struct cgraph_edge *edge;
1957
1958 switch (id->transform_call_graph_edges)
1959 {
1960 case CB_CGE_DUPLICATE:
1961 edge = id->src_node->get_edge (orig_stmt);
1962 if (edge)
1963 {
1964 int edge_freq = edge->frequency;
1965 int new_freq;
1966 struct cgraph_edge *old_edge = edge;
1967 edge = edge->clone (id->dst_node, call_stmt,
1968 gimple_uid (stmt),
1969 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1970 true);
1971 /* We could also just rescale the frequency, but
1972 doing so would introduce roundoff errors and make
1973 the verifier unhappy. */
1974 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
1975 copy_basic_block);
1976
1977 /* Speculative calls consist of two edges - direct and indirect.
1978 Duplicate the whole thing and distribute frequencies accordingly. */
1979 if (edge->speculative)
1980 {
1981 struct cgraph_edge *direct, *indirect;
1982 struct ipa_ref *ref;
1983
1984 gcc_assert (!edge->indirect_unknown_callee);
1985 old_edge->speculative_call_info (direct, indirect, ref);
1986 indirect = indirect->clone (id->dst_node, call_stmt,
1987 gimple_uid (stmt),
1988 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1989 true);
1990 if (old_edge->frequency + indirect->frequency)
1991 {
1992 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
1993 (old_edge->frequency + indirect->frequency)),
1994 CGRAPH_FREQ_MAX);
1995 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
1996 (old_edge->frequency + indirect->frequency)),
1997 CGRAPH_FREQ_MAX);
1998 }
1999 id->dst_node->clone_reference (ref, stmt);
2000 }
2001 else
2002 {
2003 edge->frequency = new_freq;
2004 if (dump_file
2005 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2006 && (edge_freq > edge->frequency + 10
2007 || edge_freq < edge->frequency - 10))
2008 {
2009 fprintf (dump_file, "Edge frequency estimated by "
2010 "cgraph %i diverge from inliner's estimate %i\n",
2011 edge_freq,
2012 edge->frequency);
2013 fprintf (dump_file,
2014 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2015 bb->index,
2016 bb->frequency,
2017 copy_basic_block->frequency);
2018 }
2019 }
2020 }
2021 break;
2022
2023 case CB_CGE_MOVE_CLONES:
2024 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2025 call_stmt);
2026 edge = id->dst_node->get_edge (stmt);
2027 break;
2028
2029 case CB_CGE_MOVE:
2030 edge = id->dst_node->get_edge (orig_stmt);
2031 if (edge)
2032 edge->set_call_stmt (call_stmt);
2033 break;
2034
2035 default:
2036 gcc_unreachable ();
2037 }
2038
2039 /* Constant propagation on arguments done during inlining
2040 may create new direct calls. Produce an edge for each. */
2041 if ((!edge
2042 || (edge->indirect_inlining_edge
2043 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2044 && id->dst_node->definition
2045 && (fn = gimple_call_fndecl (stmt)) != NULL)
2046 {
2047 struct cgraph_node *dest = cgraph_node::get (fn);
2048
2049 /* We have a missing edge in the callgraph. This can happen
2050 when previous inlining turned an indirect call into a
2051 direct call by constant propagating arguments, or when we
2052 are producing a dead clone (for further cloning). In all
2053 other cases we hit a bug (incorrect node sharing is the
2054 most common reason for missing edges). */
2055 gcc_assert (!dest->definition
2056 || dest->address_taken
2057 || !id->src_node->definition
2058 || !id->dst_node->definition);
2059 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2060 id->dst_node->create_edge_including_clones
2061 (dest, orig_stmt, call_stmt, bb->count,
2062 compute_call_stmt_bb_frequency (id->dst_node->decl,
2063 copy_basic_block),
2064 CIF_ORIGINALLY_INDIRECT_CALL);
2065 else
2066 id->dst_node->create_edge (dest, call_stmt,
2067 bb->count,
2068 compute_call_stmt_bb_frequency
2069 (id->dst_node->decl,
2070 copy_basic_block))->inline_failed
2071 = CIF_ORIGINALLY_INDIRECT_CALL;
2072 if (dump_file)
2073 {
2074 fprintf (dump_file, "Created new direct edge to %s\n",
2075 dest->name ());
2076 }
2077 }
2078
2079 notice_special_calls (as_a <gcall *> (stmt));
2080 }
2081
2082 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2083 id->eh_map, id->eh_lp_nr);
2084
2085 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2086 {
2087 ssa_op_iter i;
2088 tree def;
2089
2090 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2091 if (TREE_CODE (def) == SSA_NAME)
2092 SSA_NAME_DEF_STMT (def) = stmt;
2093 }
2094
2095 gsi_next (&copy_gsi);
2096 }
2097 while (!gsi_end_p (copy_gsi));
2098
2099 copy_gsi = gsi_last_bb (copy_basic_block);
2100 }
2101
2102 return copy_basic_block;
2103 }
2104
2105 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2106 SSA form is quite easy, since the dominator relationship for the old basic
2107 blocks does not change.
2108
2109 There is however an exception where inlining might change the dominator
2110 relation across EH edges from basic blocks within the inlined function
2111 to landing pads in the function we inline into.
2112
2113 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2114 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2115 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2116 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2117 set, which means that there will be no overlapping live ranges
2118 for the underlying symbol.
2119
2120 This might change in the future if we allow redirecting of EH edges;
2121 we might then want to change the way the CFG is built pre-inlining
2122 to include all the possible edges. */
2123 static void
2124 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2125 bool can_throw, bool nonlocal_goto)
2126 {
2127 edge e;
2128 edge_iterator ei;
2129
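/* Look for successor edges leaving the copied region: their
   destination block either was not copied at all or is mapped to
   the entry block of the copy. */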
2130 FOR_EACH_EDGE (e, ei, bb->succs)
2131 if (!e->dest->aux
2132 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2133 {
2134 gphi *phi;
2135 gphi_iterator si;
2136
2137 if (!nonlocal_goto)
2138 gcc_assert (e->flags & EDGE_EH);
2139
2140 if (!can_throw)
2141 gcc_assert (!(e->flags & EDGE_EH));
2142
2143 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2144 {
2145 edge re;
2146
2147 phi = si.phi ();
2148
2149 /* For abnormal goto/call edges the receiver can be the
2150 ENTRY_BLOCK. Do not assert this cannot happen. */
2151
2152 gcc_assert ((e->flags & EDGE_EH)
2153 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2154
2155 re = find_edge (ret_bb, e->dest);
2156 gcc_checking_assert (re);
2157 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2158 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2159
2160 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2161 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2162 }
2163 }
2164 }
2165
2166
2167 /* Copy edges from BB into its copy constructed earlier, scaling the
2168 profile accordingly. Assume the aux pointers point to the copies of
2169 each BB. Return true if any debug stmts are left after a statement
2170 that must end the basic block. */
2171
2172 static bool
2173 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2174 basic_block abnormal_goto_dest)
2175 {
2176 basic_block new_bb = (basic_block) bb->aux;
2177 edge_iterator ei;
2178 edge old_edge;
2179 gimple_stmt_iterator si;
2180 int flags;
2181 bool need_debug_cleanup = false;
2182
2183 /* Use the indices from the original blocks to create edges for the
2184 new ones. */
2185 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2186 if (!(old_edge->flags & EDGE_EH))
2187 {
2188 edge new_edge;
2189
2190 flags = old_edge->flags;
2191
2192 /* Return edges do get a FALLTHRU flag when they get inlined. */
2193 if (old_edge->dest->index == EXIT_BLOCK
2194 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2195 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2196 flags |= EDGE_FALLTHRU;
2197 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2198 new_edge->count = apply_scale (old_edge->count, count_scale);
2199 new_edge->probability = old_edge->probability;
2200 }
2201
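/* The entry and exit blocks contain no statements, so there is
   nothing below to update or split. */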
2202 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2203 return false;
2204
2205 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2206 {
2207 gimple copy_stmt;
2208 bool can_throw, nonlocal_goto;
2209
2210 copy_stmt = gsi_stmt (si);
2211 if (!is_gimple_debug (copy_stmt))
2212 update_stmt (copy_stmt);
2213
2214 /* Do this before the possible split_block. */
2215 gsi_next (&si);
2216
2217 /* If this tree could throw an exception, there are two
2218 cases where we need to add abnormal edge(s): the
2219 tree wasn't in a region and there is a "current
2220 region" in the caller; or the original tree had
2221 EH edges. In both cases split the block after the tree,
2222 and add abnormal edge(s) as needed; we need both
2223 those from the callee and the caller.
2224 We check whether the copy can throw, because the const
2225 propagation can change an INDIRECT_REF which throws
2226 into a COMPONENT_REF which doesn't. If the copy
2227 can throw, the original could also throw. */
2228 can_throw = stmt_can_throw_internal (copy_stmt);
2229 nonlocal_goto
2230 = (stmt_can_make_abnormal_goto (copy_stmt)
2231 && !computed_goto_p (copy_stmt));
2232
2233 if (can_throw || nonlocal_goto)
2234 {
2235 if (!gsi_end_p (si))
2236 {
2237 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2238 gsi_next (&si);
2239 if (gsi_end_p (si))
2240 need_debug_cleanup = true;
2241 }
2242 if (!gsi_end_p (si))
2243 /* Note that bb's predecessor edges aren't necessarily
2244 right at this point; split_block doesn't care. */
2245 {
2246 edge e = split_block (new_bb, copy_stmt);
2247
2248 new_bb = e->dest;
2249 new_bb->aux = e->src->aux;
2250 si = gsi_start_bb (new_bb);
2251 }
2252 }
2253
2254 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2255 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2256 else if (can_throw)
2257 make_eh_edges (copy_stmt);
2258
2259 /* If the call we inline cannot make an abnormal goto, do not add
2260 additional abnormal edges but only retain those already present
2261 in the original function body. */
2262 if (abnormal_goto_dest == NULL)
2263 nonlocal_goto = false;
2264 if (nonlocal_goto)
2265 {
2266 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2267
2268 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2269 nonlocal_goto = false;
2270 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2271 in OpenMP regions which aren't allowed to be left abnormally.
2272 So, no need to add abnormal edge in that case. */
2273 else if (is_gimple_call (copy_stmt)
2274 && gimple_call_internal_p (copy_stmt)
2275 && (gimple_call_internal_fn (copy_stmt)
2276 == IFN_ABNORMAL_DISPATCHER)
2277 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2278 nonlocal_goto = false;
2279 else
2280 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2281 }
2282
2283 if ((can_throw || nonlocal_goto)
2284 && gimple_in_ssa_p (cfun))
2285 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2286 can_throw, nonlocal_goto);
2287 }
2288 return need_debug_cleanup;
2289 }
2290
2291 /* Copy the PHIs. All blocks and edges have been copied; some blocks
2292 were possibly split and new outgoing EH edges inserted.
2293 BB points to the block of the original function and AUX pointers link
2294 the original and newly copied blocks. */
2295
2296 static void
2297 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2298 {
2299 basic_block const new_bb = (basic_block) bb->aux;
2300 edge_iterator ei;
2301 gphi *phi;
2302 gphi_iterator si;
2303 edge new_edge;
2304 bool inserted = false;
2305
2306 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2307 {
2308 tree res, new_res;
2309 gphi *new_phi;
2310
2311 phi = si.phi ();
2312 res = PHI_RESULT (phi);
2313 new_res = res;
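/* Only real (non-virtual) PHI nodes are copied; virtual operands
   were cleared during statement copying and will be recreated when
   the SSA form is updated. */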
2314 if (!virtual_operand_p (res))
2315 {
2316 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2317 new_phi = create_phi_node (new_res, new_bb);
2318 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2319 {
2320 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2321 tree arg;
2322 tree new_arg;
2323 edge_iterator ei2;
2324 location_t locus;
2325
2326 /* When doing partial cloning, we allow PHIs on the entry block
2327 as long as all the arguments are the same. Find any input
2328 edge to see which argument to copy. */
2329 if (!old_edge)
2330 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2331 if (!old_edge->src->aux)
2332 break;
2333
2334 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2335 new_arg = arg;
2336 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2337 gcc_assert (new_arg);
2338 /* With return slot optimization we can end up with
2339 non-gimple (foo *)&this->m, fix that here. */
2340 if (TREE_CODE (new_arg) != SSA_NAME
2341 && TREE_CODE (new_arg) != FUNCTION_DECL
2342 && !is_gimple_val (new_arg))
2343 {
2344 gimple_seq stmts = NULL;
2345 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2346 gsi_insert_seq_on_edge (new_edge, stmts);
2347 inserted = true;
2348 }
2349 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2350 if (LOCATION_BLOCK (locus))
2351 {
2352 tree *n;
2353 n = id->decl_map->get (LOCATION_BLOCK (locus));
2354 gcc_assert (n);
2355 if (*n)
2356 locus = COMBINE_LOCATION_DATA (line_table, locus, *n);
2357 else
2358 locus = LOCATION_LOCUS (locus);
2359 }
2360 else
2361 locus = LOCATION_LOCUS (locus);
2362
2363 add_phi_arg (new_phi, new_arg, new_edge, locus);
2364 }
2365 }
2366 }
2367
2368 /* Commit the delayed edge insertions. */
2369 if (inserted)
2370 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2371 gsi_commit_one_edge_insert (new_edge, NULL);
2372 }
2373
2374
2375 /* Wrapper for remap_decl so it can be used as a callback. */
2376
2377 static tree
2378 remap_decl_1 (tree decl, void *data)
2379 {
2380 return remap_decl (decl, (copy_body_data *) data);
2381 }
2382
2383 /* Build the struct function and associated data structures for the new
2384 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2385 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2386
2387 static void
2388 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2389 {
2390 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2391 gcov_type count_scale;
2392
2393 if (!DECL_ARGUMENTS (new_fndecl))
2394 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2395 if (!DECL_RESULT (new_fndecl))
2396 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2397
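/* Compute the factor used to scale the source function's profile
   counts so the clone's entry count becomes COUNT; REG_BR_PROB_BASE
   is the fixed-point base, so that value means a 1:1 copy. */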
2398 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2399 count_scale
2400 = GCOV_COMPUTE_SCALE (count,
2401 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2402 else
2403 count_scale = REG_BR_PROB_BASE;
2404
2405 /* Register specific tree functions. */
2406 gimple_register_cfg_hooks ();
2407
2408 /* Get clean struct function. */
2409 push_struct_function (new_fndecl);
2410
2411 /* We will rebuild these, so just sanity check that they are empty. */
2412 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2413 gcc_assert (cfun->local_decls == NULL);
2414 gcc_assert (cfun->cfg == NULL);
2415 gcc_assert (cfun->decl == new_fndecl);
2416
2417 /* Copy items we preserve during cloning. */
2418 cfun->static_chain_decl = src_cfun->static_chain_decl;
2419 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2420 cfun->function_end_locus = src_cfun->function_end_locus;
2421 cfun->curr_properties = src_cfun->curr_properties;
2422 cfun->last_verified = src_cfun->last_verified;
2423 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2424 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2425 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2426 cfun->stdarg = src_cfun->stdarg;
2427 cfun->after_inlining = src_cfun->after_inlining;
2428 cfun->can_throw_non_call_exceptions
2429 = src_cfun->can_throw_non_call_exceptions;
2430 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2431 cfun->returns_struct = src_cfun->returns_struct;
2432 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2433
2434 init_empty_tree_cfg ();
2435
2436 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2437 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2438 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2439 REG_BR_PROB_BASE);
2440 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2441 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2442 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2443 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2444 REG_BR_PROB_BASE);
2445 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2446 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2447 if (src_cfun->eh)
2448 init_eh_for_function ();
2449
2450 if (src_cfun->gimple_df)
2451 {
2452 init_tree_ssa (cfun);
2453 cfun->gimple_df->in_ssa_p = true;
2454 init_ssa_operands (cfun);
2455 }
2456 }
2457
2458 /* Helper function for copy_cfg_body. Move debug stmts from the end
2459 of NEW_BB to the beginning of successor basic blocks when needed. If the
2460 successor has multiple predecessors, reset the debug stmts' values;
2461 otherwise keep them. */
2462
2463 static void
2464 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2465 {
2466 edge e;
2467 edge_iterator ei;
2468 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2469
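/* Debug stmts can only be stranded at the end of a block whose last
   real stmt had to end it (it can throw or make an abnormal goto);
   otherwise there is nothing to move. */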
2470 if (gsi_end_p (si)
2471 || gsi_one_before_end_p (si)
2472 || !(stmt_can_throw_internal (gsi_stmt (si))
2473 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2474 return;
2475
2476 FOR_EACH_EDGE (e, ei, new_bb->succs)
2477 {
2478 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2479 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2480 while (is_gimple_debug (gsi_stmt (ssi)))
2481 {
2482 gimple stmt = gsi_stmt (ssi);
2483 gdebug *new_stmt;
2484 tree var;
2485 tree value;
2486
2487 /* For the last edge move the debug stmts instead of copying
2488 them. */
2489 if (ei_one_before_end_p (ei))
2490 {
2491 si = ssi;
2492 gsi_prev (&ssi);
2493 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2494 gimple_debug_bind_reset_value (stmt);
2495 gsi_remove (&si, false);
2496 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2497 continue;
2498 }
2499
2500 if (gimple_debug_bind_p (stmt))
2501 {
2502 var = gimple_debug_bind_get_var (stmt);
2503 if (single_pred_p (e->dest))
2504 {
2505 value = gimple_debug_bind_get_value (stmt);
2506 value = unshare_expr (value);
2507 }
2508 else
2509 value = NULL_TREE;
2510 new_stmt = gimple_build_debug_bind (var, value, stmt);
2511 }
2512 else if (gimple_debug_source_bind_p (stmt))
2513 {
2514 var = gimple_debug_source_bind_get_var (stmt);
2515 value = gimple_debug_source_bind_get_value (stmt);
2516 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2517 }
2518 else
2519 gcc_unreachable ();
2520 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2521 id->debug_stmts.safe_push (new_stmt);
2522 gsi_prev (&ssi);
2523 }
2524 }
2525 }
2526
2527 /* Make a copy of the sub-loops of SRC_PARENT and place them
2528 as children of DEST_PARENT. */
2529
2530 static void
2531 copy_loops (copy_body_data *id,
2532 struct loop *dest_parent, struct loop *src_parent)
2533 {
2534 struct loop *src_loop = src_parent->inner;
2535 while (src_loop)
2536 {
2537 if (!id->blocks_to_copy
2538 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2539 {
2540 struct loop *dest_loop = alloc_loop ();
2541
2542 /* Assign the new loop its header and latch and associate
2543 those with the new loop. */
2544 dest_loop->header = (basic_block)src_loop->header->aux;
2545 dest_loop->header->loop_father = dest_loop;
2546 if (src_loop->latch != NULL)
2547 {
2548 dest_loop->latch = (basic_block)src_loop->latch->aux;
2549 dest_loop->latch->loop_father = dest_loop;
2550 }
2551
2552 /* Copy loop meta-data. */
2553 copy_loop_info (src_loop, dest_loop);
2554
2555 /* Finally place it into the loop array and the loop tree. */
2556 place_new_loop (cfun, dest_loop);
2557 flow_loop_tree_node_add (dest_parent, dest_loop);
2558
2559 dest_loop->safelen = src_loop->safelen;
2560 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2561 if (src_loop->force_vectorize)
2562 {
2563 dest_loop->force_vectorize = true;
2564 cfun->has_force_vectorize_loops = true;
2565 }
2566 if (src_loop->simduid)
2567 {
2568 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2569 cfun->has_simduid_loops = true;
2570 }
2571
2572 /* Recurse. */
2573 copy_loops (id, dest_loop, src_loop);
2574 }
2575 src_loop = src_loop->next;
2576 }
2577 }
2578
2579 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2580
2581 void
2582 redirect_all_calls (copy_body_data * id, basic_block bb)
2583 {
2584 gimple_stmt_iterator si;
2585 gimple last = last_stmt (bb);
2586 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2587 {
2588 gimple stmt = gsi_stmt (si);
2589 if (is_gimple_call (stmt))
2590 {
2591 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2592 if (edge)
2593 {
2594 edge->redirect_call_stmt_to_callee ();
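/* If redirecting the call made a trailing statement no longer
   throw, the EH edges out of this block are now dead. */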
2595 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2596 gimple_purge_dead_eh_edges (bb);
2597 }
2598 }
2599 }
2600 }
2601
2602 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2603 with each bb's frequency. Used when NODE has a 0-weight entry
2604 but we are about to inline it into a non-zero count call bb.
2605 See the comments for handle_missing_profiles() in predict.c for
2606 when this can happen for COMDATs. */
2607
2608 void
2609 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2610 {
2611 basic_block bb;
2612 edge_iterator ei;
2613 edge e;
2614 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2615
2616 FOR_ALL_BB_FN (bb, fn)
2617 {
2618 bb->count = apply_scale (count,
2619 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2620 FOR_EACH_EDGE (e, ei, bb->succs)
2621 e->count = apply_probability (e->src->count, e->probability);
2622 }
2623 }
2624
2625 /* Make a copy of the body of FN so that it can be inserted inline in
2626 another function. Walks FN via CFG, returns new fndecl. */
2627
2628 static tree
2629 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2630 basic_block entry_block_map, basic_block exit_block_map,
2631 basic_block new_entry)
2632 {
2633 tree callee_fndecl = id->src_fn;
2634 /* Original cfun for the callee, doesn't change. */
2635 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2636 struct function *cfun_to_copy;
2637 basic_block bb;
2638 tree new_fndecl = NULL;
2639 bool need_debug_cleanup = false;
2640 gcov_type count_scale;
2641 int last;
2642 int incoming_frequency = 0;
2643 gcov_type incoming_count = 0;
2644
2645 /* This can happen for COMDAT routines that end up with 0 counts
2646 despite being called (see the comments for handle_missing_profiles()
2647 in predict.c as to why). Apply counts to the blocks in the callee
2648 before inlining, using the guessed edge frequencies, so that we don't
2649 end up with a 0-count inline body which can confuse downstream
2650 optimizations such as function splitting. */
2651 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2652 {
2653 /* Apply the larger of the call bb count and the total incoming
2654 call edge count to the callee. */
2655 gcov_type in_count = 0;
2656 struct cgraph_edge *in_edge;
2657 for (in_edge = id->src_node->callers; in_edge;
2658 in_edge = in_edge->next_caller)
2659 in_count += in_edge->count;
2660 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2661 }
2662
2663 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2664 count_scale
2665 = GCOV_COMPUTE_SCALE (count,
2666 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2667 else
2668 count_scale = REG_BR_PROB_BASE;
2669
2670 /* Register specific tree functions. */
2671 gimple_register_cfg_hooks ();
2672
2673 /* If we are inlining just a region of the function, make sure to connect
2674 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2675 be part of a loop, we must compute the frequency and probability of
2676 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2677 probabilities of edges incoming from the nonduplicated region. */
2678 if (new_entry)
2679 {
2680 edge e;
2681 edge_iterator ei;
2682
2683 FOR_EACH_EDGE (e, ei, new_entry->preds)
2684 if (!e->src->aux)
2685 {
2686 incoming_frequency += EDGE_FREQUENCY (e);
2687 incoming_count += e->count;
2688 }
2689 incoming_count = apply_scale (incoming_count, count_scale);
2690 incoming_frequency
2691 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2692 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2693 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2694 }
2695
2696 /* We must have a CFG at this point. */
2697 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2698 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2699
2700 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2701
2702 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2703 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2704 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2705 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2706
2707 /* Duplicate any exception-handling regions. */
2708 if (cfun->eh)
2709 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2710 remap_decl_1, id);
2711
2712 /* Use aux pointers to map the original blocks to their copies. */
2713 FOR_EACH_BB_FN (bb, cfun_to_copy)
2714 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2715 {
2716 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2717 bb->aux = new_bb;
2718 new_bb->aux = bb;
2719 new_bb->loop_father = entry_block_map->loop_father;
2720 }
2721
2722 last = last_basic_block_for_fn (cfun);
2723
2724 /* Now that we've duplicated the blocks, duplicate their edges. */
2725 basic_block abnormal_goto_dest = NULL;
2726 if (id->call_stmt
2727 && stmt_can_make_abnormal_goto (id->call_stmt))
2728 {
2729 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2730
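/* The inlined call can transfer control abnormally only if it ends
   its block; in that case the copied body's abnormal gotos should
   target the caller's abnormal-successor dispatcher. */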
2731 bb = gimple_bb (id->call_stmt);
2732 gsi_next (&gsi);
2733 if (gsi_end_p (gsi))
2734 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2735 }
2736 FOR_ALL_BB_FN (bb, cfun_to_copy)
2737 if (!id->blocks_to_copy
2738 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2739 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2740 abnormal_goto_dest);
2741
2742 if (new_entry)
2743 {
2744 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2745 e->probability = REG_BR_PROB_BASE;
2746 e->count = incoming_count;
2747 }
2748
2749 /* Duplicate the loop tree, if available and wanted. */
2750 if (loops_for_fn (src_cfun) != NULL
2751 && current_loops != NULL)
2752 {
2753 copy_loops (id, entry_block_map->loop_father,
2754 get_loop (src_cfun, 0));
2755 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2756 loops_state_set (LOOPS_NEED_FIXUP);
2757 }
2758
2759 /* If the loop tree in the source function needed fixup, mark the
2760 destination loop tree for fixup, too. */
2761 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2762 loops_state_set (LOOPS_NEED_FIXUP);
2763
2764 if (gimple_in_ssa_p (cfun))
2765 FOR_ALL_BB_FN (bb, cfun_to_copy)
2766 if (!id->blocks_to_copy
2767 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2768 copy_phis_for_bb (bb, id);
2769
2770 FOR_ALL_BB_FN (bb, cfun_to_copy)
2771 if (bb->aux)
2772 {
2773 if (need_debug_cleanup
2774 && bb->index != ENTRY_BLOCK
2775 && bb->index != EXIT_BLOCK)
2776 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2777 /* Update call edge destinations. This cannot be done before loop
2778 info is updated, because we may split basic blocks. */
2779 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2780 && bb->index != ENTRY_BLOCK
2781 && bb->index != EXIT_BLOCK)
2782 redirect_all_calls (id, (basic_block)bb->aux);
2783 ((basic_block)bb->aux)->aux = NULL;
2784 bb->aux = NULL;
2785 }
2786
2787 /* Zero out AUX fields of blocks newly created during EH edge
2788 insertion. */
2789 for (; last < last_basic_block_for_fn (cfun); last++)
2790 {
2791 if (need_debug_cleanup)
2792 maybe_move_debug_stmts_to_successors (id,
2793 BASIC_BLOCK_FOR_FN (cfun, last));
2794 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2795 /* Update call edge destinations. This cannot be done before loop
2796 info is updated, because we may split basic blocks. */
2797 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2798 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2799 }
2800 entry_block_map->aux = NULL;
2801 exit_block_map->aux = NULL;
2802
2803 if (id->eh_map)
2804 {
2805 delete id->eh_map;
2806 id->eh_map = NULL;
2807 }
2808 if (id->dependence_map)
2809 {
2810 delete id->dependence_map;
2811 id->dependence_map = NULL;
2812 }
2813
2814 return new_fndecl;
2815 }
2816
2817 /* Copy the debug STMT using ID. We deal with these statements in a
2818 special way: if any variable in their VALUE expression wasn't
2819 remapped yet, we won't remap it, because that would get decl uids
2820 out of sync, causing codegen differences between -g and -g0. If
2821 this arises, we drop the VALUE expression altogether. */
2822
2823 static void
2824 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2825 {
2826 tree t, *n;
2827 struct walk_stmt_info wi;
2828
2829 if (gimple_block (stmt))
2830 {
2831 n = id->decl_map->get (gimple_block (stmt));
2832 gimple_set_block (stmt, n ? *n : id->block);
2833 }
2834
2835 /* Remap all the operands in STMT. */
2836 memset (&wi, 0, sizeof (wi));
2837 wi.info = id;
2838
2839 processing_debug_stmt = 1;
2840
2841 if (gimple_debug_source_bind_p (stmt))
2842 t = gimple_debug_source_bind_get_var (stmt);
2843 else
2844 t = gimple_debug_bind_get_var (stmt);
2845
2846 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2847 && (n = id->debug_map->get (t)))
2848 {
2849 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2850 t = *n;
2851 }
2852 else if (TREE_CODE (t) == VAR_DECL
2853 && !is_global_var (t)
2854 && !id->decl_map->get (t))
2855 /* T is a non-localized variable. */;
2856 else
2857 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2858
2859 if (gimple_debug_bind_p (stmt))
2860 {
2861 gimple_debug_bind_set_var (stmt, t);
2862
2863 if (gimple_debug_bind_has_value_p (stmt))
2864 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2865 remap_gimple_op_r, &wi, NULL);
2866
2867 /* Punt if any decl couldn't be remapped. */
2868 if (processing_debug_stmt < 0)
2869 gimple_debug_bind_reset_value (stmt);
2870 }
2871 else if (gimple_debug_source_bind_p (stmt))
2872 {
2873 gimple_debug_source_bind_set_var (stmt, t);
2874 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2875 remap_gimple_op_r, &wi, NULL);
2876 /* When inlining, if the source bind refers to one of the optimized
2877 away parameters, change the source bind into a normal debug bind
2878 referring to the corresponding DEBUG_EXPR_DECL that should have
2879 been bound before the call stmt. */
2880 t = gimple_debug_source_bind_get_value (stmt);
2881 if (t != NULL_TREE
2882 && TREE_CODE (t) == PARM_DECL
2883 && id->call_stmt)
2884 {
2885 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2886 unsigned int i;
2887 if (debug_args != NULL)
2888 {
2889 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2890 if ((**debug_args)[i] == DECL_ORIGIN (t)
2891 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2892 {
2893 t = (**debug_args)[i + 1];
2894 stmt->subcode = GIMPLE_DEBUG_BIND;
2895 gimple_debug_bind_set_value (stmt, t);
2896 break;
2897 }
2898 }
2899 }
2900 }
2901
2902 processing_debug_stmt = 0;
2903
2904 update_stmt (stmt);
2905 }
2906
2907 /* Process deferred debug stmts. In order to give values better odds
2908 of being successfully remapped, we delay the processing of debug
2909 stmts until all other stmts that might require remapping are
2910 processed. */
2911
2912 static void
2913 copy_debug_stmts (copy_body_data *id)
2914 {
2915 size_t i;
2916 gdebug *stmt;
2917
2918 if (!id->debug_stmts.exists ())
2919 return;
2920
2921 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2922 copy_debug_stmt (stmt, id);
2923
2924 id->debug_stmts.release ();
2925 }
2926
2927 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2928 another function. */
2929
2930 static tree
2931 copy_tree_body (copy_body_data *id)
2932 {
2933 tree fndecl = id->src_fn;
2934 tree body = DECL_SAVED_TREE (fndecl);
2935
2936 walk_tree (&body, copy_tree_body_r, id, NULL);
2937
2938 return body;
2939 }
2940
2941 /* Make a copy of the body of FN so that it can be inserted inline in
2942 another function. */
2943
2944 static tree
2945 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2946 basic_block entry_block_map, basic_block exit_block_map,
2947 basic_block new_entry)
2948 {
2949 tree fndecl = id->src_fn;
2950 tree body;
2951
2952 /* If this body has a CFG, walk CFG and copy. */
2953 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2954 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2955 new_entry);
2956 copy_debug_stmts (id);
2957
2958 return body;
2959 }
2960
2961 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2962 defined in function FN, or of a data member thereof. */
2963
2964 static bool
2965 self_inlining_addr_expr (tree value, tree fn)
2966 {
2967 tree var;
2968
2969 if (TREE_CODE (value) != ADDR_EXPR)
2970 return false;
2971
2972 var = get_base_address (TREE_OPERAND (value, 0));
2973
2974 return var && auto_var_in_fn_p (var, fn);
2975 }
2976
2977 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2978 lexical block and line number information from base_stmt, if given,
2979 or from the last stmt of the block otherwise. */
2980
2981 static gimple
2982 insert_init_debug_bind (copy_body_data *id,
2983 basic_block bb, tree var, tree value,
2984 gimple base_stmt)
2985 {
2986 gimple note;
2987 gimple_stmt_iterator gsi;
2988 tree tracked_var;
2989
2990 if (!gimple_in_ssa_p (id->src_cfun))
2991 return NULL;
2992
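/* Debug binds are useless unless var-tracking assignments is
   enabled for the function we inline into. */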
2993 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
2994 return NULL;
2995
2996 tracked_var = target_for_debug_bind (var);
2997 if (!tracked_var)
2998 return NULL;
2999
3000 if (bb)
3001 {
3002 gsi = gsi_last_bb (bb);
3003 if (!base_stmt && !gsi_end_p (gsi))
3004 base_stmt = gsi_stmt (gsi);
3005 }
3006
3007 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3008
3009 if (bb)
3010 {
3011 if (!gsi_end_p (gsi))
3012 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3013 else
3014 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3015 }
3016
3017 return note;
3018 }
3019
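/* Insert INIT_STMT, an initialization of a parameter copy, at the end
   of BB and regimplify its operands, forcing non-register stores from
   converted RHSes through a temporary first. */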
3020 static void
3021 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
3022 {
3023 /* If VAR represents a zero-sized variable, it's possible that the
3024 assignment statement may result in no gimple statements. */
3025 if (init_stmt)
3026 {
3027 gimple_stmt_iterator si = gsi_last_bb (bb);
3028
3029 /* We can end up with init statements that store to a non-register
3030 from a rhs with a conversion. Handle that here by forcing the
3031 rhs into a temporary. gimple_regimplify_operands is not
3032 prepared to do this for us. */
3033 if (!is_gimple_debug (init_stmt)
3034 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3035 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3036 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3037 {
3038 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3039 gimple_expr_type (init_stmt),
3040 gimple_assign_rhs1 (init_stmt));
3041 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3042 GSI_NEW_STMT);
3043 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3044 gimple_assign_set_rhs1 (init_stmt, rhs);
3045 }
3046 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3047 gimple_regimplify_operands (init_stmt, &si);
3048
3049 if (!is_gimple_debug (init_stmt))
3050 {
3051 tree def = gimple_assign_lhs (init_stmt);
3052 insert_init_debug_bind (id, bb, def, def, init_stmt);
3053 }
3054 }
3055 }
3056
3057 /* Initialize parameter P with VALUE. If needed, produce an init statement
3058 at the end of BB. When BB is NULL, we return the init statement to be
3059 output later. */
3060 static gimple
3061 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3062 basic_block bb, tree *vars)
3063 {
3064 gimple init_stmt = NULL;
3065 tree var;
3066 tree rhs = value;
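/* DEF is the SSA name carrying the incoming value of P in the
   callee (its default definition), if the callee is in SSA form. */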
3067 tree def = (gimple_in_ssa_p (cfun)
3068 ? ssa_default_def (id->src_cfun, p) : NULL);
3069
3070 if (value
3071 && value != error_mark_node
3072 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3073 {
3074 /* If we can match up types by promotion/demotion do so. */
3075 if (fold_convertible_p (TREE_TYPE (p), value))
3076 rhs = fold_convert (TREE_TYPE (p), value);
3077 else
3078 {
3079 /* ??? For valid programs we should not end up here.
3080 Still if we end up with truly mismatched types here, fall back
3081 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3082 GIMPLE to the following passes. */
3083 if (!is_gimple_reg_type (TREE_TYPE (value))
3084 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3085 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3086 else
3087 rhs = build_zero_cst (TREE_TYPE (p));
3088 }
3089 }
3090
3091 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3092 here since the type of this decl must be visible to the calling
3093 function. */
3094 var = copy_decl_to_var (p, id);
3095
3096 /* Declare this new variable. */
3097 DECL_CHAIN (var) = *vars;
3098 *vars = var;
3099
3100 /* Make gimplifier happy about this variable. */
3101 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3102
3103 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3104 we would not need to create a new variable here at all, if it
3105 weren't for debug info. Still, we can just use the argument
3106 value. */
3107 if (TREE_READONLY (p)
3108 && !TREE_ADDRESSABLE (p)
3109 && value && !TREE_SIDE_EFFECTS (value)
3110 && !def)
3111 {
3112 /* We may produce non-gimple trees by adding NOPs or introduce
3113 invalid sharing when the operand is not really constant.
3114 It is no big deal to prohibit constant propagation here, as
3115 we will constant propagate in the DOM1 pass anyway. */
3116 if (is_gimple_min_invariant (value)
3117 && useless_type_conversion_p (TREE_TYPE (p),
3118 TREE_TYPE (value))
3119 /* We have to be very careful about ADDR_EXPR. Make sure
3120 the base variable isn't a local variable of the inlined
3121 function, e.g., when doing recursive inlining, direct or
3122 mutually-recursive or whatever, which is why we don't
3123 just test whether fn == current_function_decl. */
3124 && ! self_inlining_addr_expr (value, fn))
3125 {
3126 insert_decl_map (id, p, value);
3127 insert_debug_decl_map (id, p, var);
3128 return insert_init_debug_bind (id, bb, var, value, NULL);
3129 }
3130 }
3131
3132 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3133 that way, when the PARM_DECL is encountered, it will be
3134 automatically replaced by the VAR_DECL. */
3135 insert_decl_map (id, p, var);
3136
3137 /* Even if P was TREE_READONLY, the new VAR should not be.
3138 In the original code, we would have constructed a
3139 temporary, and then the function body would have never
3140 changed the value of P. However, now, we will be
3141 constructing VAR directly. The constructor body may
3142 change its value multiple times as it is being
3143 constructed. Therefore, it must not be TREE_READONLY;
3144 the back-end assumes that a TREE_READONLY variable is
3145 assigned to only once. */
3146 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3147 TREE_READONLY (var) = 0;
3148
3149 /* If there is no setup required and we are in SSA, take the easy route
3150 replacing all SSA names representing the function parameter by the
3151 SSA name passed to the function.
3152
3153 We need to construct a map for the variable anyway, as it might be
3154 used in different SSA names when the parameter is set in the function.
3155
3156 Do the replacement at -O0 for const arguments replaced by a constant.
3157 This is important for builtin_constant_p and other constructs requiring
3158 a constant argument to be visible in the inlined function body. */
3159 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3160 && (optimize
3161 || (TREE_READONLY (p)
3162 && is_gimple_min_invariant (rhs)))
3163 && (TREE_CODE (rhs) == SSA_NAME
3164 || is_gimple_min_invariant (rhs))
3165 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3166 {
3167 insert_decl_map (id, def, rhs);
3168 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3169 }
3170
3171 /* If the value of the argument is never used, there is no need to
3172 initialize it. */
3173 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3174 {
3175 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3176 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3177 }
3178
3179 /* Initialize this VAR_DECL from the equivalent argument. Convert
3180 the argument to the proper type in case it was promoted. */
3181 if (value)
3182 {
3183 if (rhs == error_mark_node)
3184 {
3185 insert_decl_map (id, p, var);
3186 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3187 }
3188
3189 STRIP_USELESS_TYPE_CONVERSION (rhs);
3190
3191 /* If we are in SSA form, properly remap the default definition
3192 or assign to a dummy SSA name if the parameter is unused and
3193 we are not optimizing. */
3194 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3195 {
3196 if (def)
3197 {
3198 def = remap_ssa_name (def, id);
3199 init_stmt = gimple_build_assign (def, rhs);
3200 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3201 set_ssa_default_def (cfun, var, NULL);
3202 }
3203 else if (!optimize)
3204 {
3205 def = make_ssa_name (var);
3206 init_stmt = gimple_build_assign (def, rhs);
3207 }
3208 }
3209 else
3210 init_stmt = gimple_build_assign (var, rhs);
3211
3212 if (bb && init_stmt)
3213 insert_init_stmt (id, bb, init_stmt);
3214 }
3215 return init_stmt;
3216 }
3217
3218 /* Generate code to initialize the parameters of the function at the
3219 top of the stack in ID from the GIMPLE_CALL STMT. */
3220
3221 static void
3222 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
3223 tree fn, basic_block bb)
3224 {
3225 tree parms;
3226 size_t i;
3227 tree p;
3228 tree vars = NULL_TREE;
3229 tree static_chain = gimple_call_chain (stmt);
3230
3231 /* Figure out what the parameters are. */
3232 parms = DECL_ARGUMENTS (fn);
3233
3234 /* Loop through the parameter declarations, replacing each with an
3235 equivalent VAR_DECL, appropriately initialized. */
3236 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3237 {
3238 tree val;
3239 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3240 setup_one_parameter (id, p, val, fn, bb, &vars);
3241 }
3242 /* After remapping the parameters, remap their types. This has to be done
3243 in a second loop over all parameters to appropriately remap
3244 variable-sized arrays when the size is specified in a
3245 parameter following the array. */
3246 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3247 {
3248 tree *varp = id->decl_map->get (p);
3249 if (varp
3250 && TREE_CODE (*varp) == VAR_DECL)
3251 {
3252 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3253 ? ssa_default_def (id->src_cfun, p) : NULL);
3254 tree var = *varp;
3255 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3256 /* Also remap the default definition if it was remapped
3257 to the default definition of the parameter replacement
3258 by the parameter setup. */
3259 if (def)
3260 {
3261 tree *defp = id->decl_map->get (def);
3262 if (defp
3263 && TREE_CODE (*defp) == SSA_NAME
3264 && SSA_NAME_VAR (*defp) == var)
3265 TREE_TYPE (*defp) = TREE_TYPE (var);
3266 }
3267 }
3268 }
3269
3270 /* Initialize the static chain. */
3271 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3272 gcc_assert (fn != current_function_decl);
3273 if (p)
3274 {
3275 /* No static chain? Seems like a bug in tree-nested.c. */
3276 gcc_assert (static_chain);
3277
3278 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3279 }
3280
3281 declare_inline_vars (id->block, vars);
3282 }
3283
3284
3285 /* Declare a return variable to replace the RESULT_DECL for the
3286 function we are calling. An appropriate DECL_STMT is returned.
3287 The USE_STMT is filled to contain a use of the declaration to
3288 indicate the return value of the function.
3289
3290 RETURN_SLOT, if non-null, is the place where to store the result. It
3291 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3292 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3293
3294 RETURN_BOUNDS holds a destination for returned bounds.
3295
3296 The return value is a (possibly null) value that holds the result
3297 as seen by the caller. */
3298
3299 static tree
3300 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3301 tree return_bounds, basic_block entry_bb)
3302 {
3303 tree callee = id->src_fn;
3304 tree result = DECL_RESULT (callee);
3305 tree callee_type = TREE_TYPE (result);
3306 tree caller_type;
3307 tree var, use;
3308
3309 /* Handle type-mismatches in the function declaration return type
3310 vs. the call expression. */
3311 if (modify_dest)
3312 caller_type = TREE_TYPE (modify_dest);
3313 else
3314 caller_type = TREE_TYPE (TREE_TYPE (callee));
3315
3316 /* We don't need to do anything for functions that don't return anything. */
3317 if (VOID_TYPE_P (callee_type))
3318 return NULL_TREE;
3319
3320 /* If there was a return slot, then the return value is the
3321 dereferenced address of that object. */
3322 if (return_slot)
3323 {
3324 /* The front end shouldn't have used both return_slot and
3325 a modify expression. */
3326 gcc_assert (!modify_dest);
3327 if (DECL_BY_REFERENCE (result))
3328 {
3329 tree return_slot_addr = build_fold_addr_expr (return_slot);
3330 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3331
3332 /* We are going to construct *&return_slot and we can't do that
3333 for variables believed not to be addressable.
3334
3335 FIXME: This check can possibly match, because values returned
3336 via return slot optimization are not believed to have their
3337 address taken by alias analysis. */
3338 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3339 var = return_slot_addr;
3340 }
3341 else
3342 {
3343 var = return_slot;
3344 gcc_assert (TREE_CODE (var) != SSA_NAME);
3345 if (TREE_ADDRESSABLE (result))
3346 mark_addressable (var);
3347 }
3348 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3349 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3350 && !DECL_GIMPLE_REG_P (result)
3351 && DECL_P (var))
3352 DECL_GIMPLE_REG_P (var) = 0;
3353 use = NULL;
3354 goto done;
3355 }
3356
3357 /* All types requiring non-trivial constructors should have been handled. */
3358 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3359
3360 /* Attempt to avoid creating a new temporary variable. */
3361 if (modify_dest
3362 && TREE_CODE (modify_dest) != SSA_NAME)
3363 {
3364 bool use_it = false;
3365
3366 /* We can't use MODIFY_DEST if there's type promotion involved. */
3367 if (!useless_type_conversion_p (callee_type, caller_type))
3368 use_it = false;
3369
3370 /* ??? If we're assigning to a variable-sized type, then we must
3371 reuse the destination variable, because we've no good way to
3372 create variable-sized temporaries at this point. */
3373 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3374 use_it = true;
3375
3376 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3377 reuse it as the result of the call directly. Don't do this if
3378 it would promote MODIFY_DEST to addressable. */
3379 else if (TREE_ADDRESSABLE (result))
3380 use_it = false;
3381 else
3382 {
3383 tree base_m = get_base_address (modify_dest);
3384
3385 /* If the base isn't a decl, then it's a pointer, and we don't
3386 know where that's going to go. */
3387 if (!DECL_P (base_m))
3388 use_it = false;
3389 else if (is_global_var (base_m))
3390 use_it = false;
3391 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3392 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3393 && !DECL_GIMPLE_REG_P (result)
3394 && DECL_GIMPLE_REG_P (base_m))
3395 use_it = false;
3396 else if (!TREE_ADDRESSABLE (base_m))
3397 use_it = true;
3398 }
3399
3400 if (use_it)
3401 {
3402 var = modify_dest;
3403 use = NULL;
3404 goto done;
3405 }
3406 }
3407
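/* MODIFY_DEST could not be reused, so a fresh temporary is created
   for the result; the assert guards that its type has constant size,
   since variable-sized results must have reused MODIFY_DEST above. */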
3408 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3409
3410 var = copy_result_decl_to_var (result, id);
3411 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3412
3413 /* Do not have the rest of GCC warn about this variable as it should
3414 not be visible to the user. */
3415 TREE_NO_WARNING (var) = 1;
3416
3417 declare_inline_vars (id->block, var);
3418
3419 /* Build the use expr. If the return type of the function was
3420 promoted, convert it back to the expected type. */
3421 use = var;
3422 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3423 {
3424 /* If we can match up types by promotion/demotion do so. */
3425 if (fold_convertible_p (caller_type, var))
3426 use = fold_convert (caller_type, var);
3427 else
3428 {
3429 /* ??? For valid programs we should not end up here.
3430 Still if we end up with truly mismatched types here, fall back
3431 to using a MEM_REF to not leak invalid GIMPLE to the following
3432 passes. */
3433 /* Prevent var from being written into SSA form. */
3434 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3435 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3436 DECL_GIMPLE_REG_P (var) = false;
3437 else if (is_gimple_reg_type (TREE_TYPE (var)))
3438 TREE_ADDRESSABLE (var) = true;
3439 use = fold_build2 (MEM_REF, caller_type,
3440 build_fold_addr_expr (var),
3441 build_int_cst (ptr_type_node, 0));
3442 }
3443 }
3444
3445 STRIP_USELESS_TYPE_CONVERSION (use);
3446
3447 if (DECL_BY_REFERENCE (result))
3448 {
3449 TREE_ADDRESSABLE (var) = 1;
3450 var = build_fold_addr_expr (var);
3451 }
3452
3453 done:
3454 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3455 way, when the RESULT_DECL is encountered, it will be
3456 automatically replaced by the VAR_DECL.
3457
3458 When returning by reference, ensure that RESULT_DECL remaps to
3459 gimple_val. */
3460 if (DECL_BY_REFERENCE (result)
3461 && !is_gimple_val (var))
3462 {
3463 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3464 insert_decl_map (id, result, temp);
3465 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3466 its default_def SSA_NAME. */
3467 if (gimple_in_ssa_p (id->src_cfun)
3468 && is_gimple_reg (result))
3469 {
3470 temp = make_ssa_name (temp);
3471 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3472 }
3473 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3474 }
3475 else
3476 insert_decl_map (id, result, var);
3477
3478 /* Remember this so we can ignore it in remap_decls. */
3479 id->retvar = var;
3480
3481 /* If returned bounds are used, then make var for them. */
3482 if (return_bounds)
3483 {
3484 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3485 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3486 TREE_NO_WARNING (bndtemp) = 1;
3487 declare_inline_vars (id->block, bndtemp);
3488
3489 id->retbnd = bndtemp;
3490 insert_init_stmt (id, entry_bb,
3491 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3492 }
3493
3494 return use;
3495 }
3496
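/* A hedged sketch of what declare_return_variable arranges in a
   simple case (names are illustrative).  Given a caller statement

     a = foo (b);

   where foo's body ends in "return t;", foo's RESULT_DECL is mapped
   to a fresh caller-local variable, say retval.0; the copied body
   then stores

     retval.0 = t;

   and expand_call_inline below replaces the call itself with

     a = retval.0;

   In the use_it case above, "a" is reused directly as VAR, so the
   intermediate copy through retval.0 never materializes.  */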
3497 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
3498 to a local label. */
3499
3500 static tree
3501 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
3502 {
3503 tree node = *nodep;
3504 tree fn = (tree) fnp;
3505
3506 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
3507 return node;
3508
3509 if (TYPE_P (node))
3510 *walk_subtrees = 0;
3511
3512 return NULL_TREE;
3513 }
3514
3515 /* Determine if the function can be copied. If so return NULL. If
3516 not return a string describng the reason for failure. */
3517
3518 const char *
3519 copy_forbidden (struct function *fun, tree fndecl)
3520 {
3521 const char *reason = fun->cannot_be_copied_reason;
3522 tree decl;
3523 unsigned ix;
3524
3525 /* Only examine the function once. */
3526 if (fun->cannot_be_copied_set)
3527 return reason;
3528
3529 /* We cannot copy a function that receives a non-local goto
3530 because we cannot remap the destination label used in the
3531 function that is performing the non-local goto. */
3532 /* ??? Actually, this should be possible, if we work at it.
3533 No doubt there's just a handful of places that simply
3534 assume it doesn't happen and don't substitute properly. */
3535 if (fun->has_nonlocal_label)
3536 {
3537 reason = G_("function %q+F can never be copied "
3538 "because it receives a non-local goto");
3539 goto fail;
3540 }
3541
3542 FOR_EACH_LOCAL_DECL (fun, ix, decl)
3543 if (TREE_CODE (decl) == VAR_DECL
3544 && TREE_STATIC (decl)
3545 && !DECL_EXTERNAL (decl)
3546 && DECL_INITIAL (decl)
3547 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
3548 has_label_address_in_static_1,
3549 fndecl))
3550 {
3551 reason = G_("function %q+F can never be copied because it saves "
3552 "address of local label in a static variable");
3553 goto fail;
3554 }
3555
3556 fail:
3557 fun->cannot_be_copied_reason = reason;
3558 fun->cannot_be_copied_set = true;
3559 return reason;
3560 }
3561
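/* A minimal GNU C example (illustrative, not from the testsuite) of
   a function copy_forbidden rejects: the address of a local label is
   stored in the DECL_INITIAL of a function-local static, and a copy
   of the body could not remap that saved address.

     int
     f (void)
     {
       static void *p = &&lab;
      lab:
       return p != 0;
     }

   has_label_address_in_static_1 finds the LABEL_DECL for "lab" with
   DECL_CONTEXT == f while walking the static's initializer.  */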
3562
3563 static const char *inline_forbidden_reason;
3564
3565 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3566 iff a function cannot be inlined. Also sets the reason why. */
3567
3568 static tree
3569 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3570 struct walk_stmt_info *wip)
3571 {
3572 tree fn = (tree) wip->info;
3573 tree t;
3574 gimple stmt = gsi_stmt (*gsi);
3575
3576 switch (gimple_code (stmt))
3577 {
3578 case GIMPLE_CALL:
3579 /* Refuse to inline an alloca call unless the user explicitly forced
3580 it, since this may change the program's memory overhead drastically
3581 when the function using alloca is called in a loop. In the GCC
3582 benchmark in SPEC2000, inlining into schedule_block caused it to
3583 require 2GB of RAM instead of 256MB. Don't do so for alloca calls
3584 emitted for VLA objects, as those can't cause unbounded growth
3585 (they're always wrapped inside stack_save/stack_restore regions). */
3586 if (gimple_alloca_call_p (stmt)
3587 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3588 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3589 {
3590 inline_forbidden_reason
3591 = G_("function %q+F can never be inlined because it uses "
3592 "alloca (override using the always_inline attribute)");
3593 *handled_ops_p = true;
3594 return fn;
3595 }
3596
3597 t = gimple_call_fndecl (stmt);
3598 if (t == NULL_TREE)
3599 break;
3600
3601 /* We cannot inline functions that call setjmp. */
3602 if (setjmp_call_p (t))
3603 {
3604 inline_forbidden_reason
3605 = G_("function %q+F can never be inlined because it uses setjmp");
3606 *handled_ops_p = true;
3607 return t;
3608 }
3609
3610 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3611 switch (DECL_FUNCTION_CODE (t))
3612 {
3613 /* We cannot inline functions that take a variable number of
3614 arguments. */
3615 case BUILT_IN_VA_START:
3616 case BUILT_IN_NEXT_ARG:
3617 case BUILT_IN_VA_END:
3618 inline_forbidden_reason
3619 = G_("function %q+F can never be inlined because it "
3620 "uses variable argument lists");
3621 *handled_ops_p = true;
3622 return t;
3623
3624 case BUILT_IN_LONGJMP:
3625 /* We can't inline functions that call __builtin_longjmp at
3626 all. The non-local goto machinery really requires the
3627 destination be in a different function. If we allow the
3628 function calling __builtin_longjmp to be inlined into the
3629 function calling __builtin_setjmp, Things will Go Awry. */
3630 inline_forbidden_reason
3631 = G_("function %q+F can never be inlined because "
3632 "it uses setjmp-longjmp exception handling");
3633 *handled_ops_p = true;
3634 return t;
3635
3636 case BUILT_IN_NONLOCAL_GOTO:
3637 /* Similarly. */
3638 inline_forbidden_reason
3639 = G_("function %q+F can never be inlined because "
3640 "it uses non-local goto");
3641 *handled_ops_p = true;
3642 return t;
3643
3644 case BUILT_IN_RETURN:
3645 case BUILT_IN_APPLY_ARGS:
3646 /* If a __builtin_apply_args caller would be inlined,
3647 it would be saving arguments of the function it has
3648 been inlined into. Similarly __builtin_return would
3649 return from the function the inline has been inlined into. */
3650 inline_forbidden_reason
3651 = G_("function %q+F can never be inlined because "
3652 "it uses __builtin_return or __builtin_apply_args");
3653 *handled_ops_p = true;
3654 return t;
3655
3656 default:
3657 break;
3658 }
3659 break;
3660
3661 case GIMPLE_GOTO:
3662 t = gimple_goto_dest (stmt);
3663
3664 /* We will not inline a function which uses computed goto. The
3665 addresses of its local labels, which may be tucked into
3666 global storage, are of course not constant across
3667 instantiations, which causes unexpected behavior. */
3668 if (TREE_CODE (t) != LABEL_DECL)
3669 {
3670 inline_forbidden_reason
3671 = G_("function %q+F can never be inlined "
3672 "because it contains a computed goto");
3673 *handled_ops_p = true;
3674 return t;
3675 }
3676 break;
3677
3678 default:
3679 break;
3680 }
3681
3682 *handled_ops_p = false;
3683 return NULL_TREE;
3684 }
3685
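/* A hedged illustration of the alloca rule above.  Inlining f below
   into a caller that invokes it inside a loop would turn one
   frame-lifetime allocation into an unbounded number of them, so the
   call is refused unless f carries the always_inline attribute:

     extern void g (char *);

     void
     f (int n)
     {
       char *buf = __builtin_alloca (n);
       g (buf);
     }

   A VLA such as "char buf[n];" is not refused: its implicit alloca
   is marked alloca_for_var and is bracketed by stack_save and
   stack_restore, so it cannot grow the stack without bound.  */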
3686 /* Return true if FNDECL is a function that cannot be inlined into
3687 another one. */
3688
3689 static bool
3690 inline_forbidden_p (tree fndecl)
3691 {
3692 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3693 struct walk_stmt_info wi;
3694 basic_block bb;
3695 bool forbidden_p = false;
3696
3697 /* First check for shared reasons not to copy the code. */
3698 inline_forbidden_reason = copy_forbidden (fun, fndecl);
3699 if (inline_forbidden_reason != NULL)
3700 return true;
3701
3702 /* Next, walk the statements of the function looking for
3703 constructs we can't handle, or that are non-optimal for inlining. */
3704 hash_set<tree> visited_nodes;
3705 memset (&wi, 0, sizeof (wi));
3706 wi.info = (void *) fndecl;
3707 wi.pset = &visited_nodes;
3708
3709 FOR_EACH_BB_FN (bb, fun)
3710 {
3711 gimple ret;
3712 gimple_seq seq = bb_seq (bb);
3713 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3714 forbidden_p = (ret != NULL);
3715 if (forbidden_p)
3716 break;
3717 }
3718
3719 return forbidden_p;
3720 }
3721 \f
3722 /* Return false if the function FNDECL cannot be inlined on account of its
3723 attributes, true otherwise. */
3724 static bool
3725 function_attribute_inlinable_p (const_tree fndecl)
3726 {
3727 if (targetm.attribute_table)
3728 {
3729 const_tree a;
3730
3731 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3732 {
3733 const_tree name = TREE_PURPOSE (a);
3734 int i;
3735
3736 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3737 if (is_attribute_p (targetm.attribute_table[i].name, name))
3738 return targetm.function_attribute_inlinable_p (fndecl);
3739 }
3740 }
3741
3742 return true;
3743 }
3744
3745 /* Returns nonzero if FN is a function that does not have any
3746 fundamental inline blocking properties. */
3747
3748 bool
3749 tree_inlinable_function_p (tree fn)
3750 {
3751 bool inlinable = true;
3752 bool do_warning;
3753 tree always_inline;
3754
3755 /* If we've already decided this function shouldn't be inlined,
3756 there's no need to check again. */
3757 if (DECL_UNINLINABLE (fn))
3758 return false;
3759
3760 /* We only warn for functions declared `inline' by the user. */
3761 do_warning = (warn_inline
3762 && DECL_DECLARED_INLINE_P (fn)
3763 && !DECL_NO_INLINE_WARNING_P (fn)
3764 && !DECL_IN_SYSTEM_HEADER (fn));
3765
3766 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3767
3768 if (flag_no_inline
3769 && always_inline == NULL)
3770 {
3771 if (do_warning)
3772 warning (OPT_Winline, "function %q+F can never be inlined because it "
3773 "is suppressed using -fno-inline", fn);
3774 inlinable = false;
3775 }
3776
3777 else if (!function_attribute_inlinable_p (fn))
3778 {
3779 if (do_warning)
3780 warning (OPT_Winline, "function %q+F can never be inlined because it "
3781 "uses attributes conflicting with inlining", fn);
3782 inlinable = false;
3783 }
3784
3785 else if (inline_forbidden_p (fn))
3786 {
3787 /* See if we should warn about uninlinable functions. Previously,
3788 some of these warnings would be issued while trying to expand
3789 the function inline, but that would cause multiple warnings
3790 about functions that would for example call alloca. But since
3791 this is a property of the function, just one warning is enough.
3792 As a bonus we can now give more details about the reason why a
3793 function is not inlinable. */
3794 if (always_inline)
3795 error (inline_forbidden_reason, fn);
3796 else if (do_warning)
3797 warning (OPT_Winline, inline_forbidden_reason, fn);
3798
3799 inlinable = false;
3800 }
3801
3802 /* Squirrel away the result so that we don't have to check again. */
3803 DECL_UNINLINABLE (fn) = !inlinable;
3804
3805 return inlinable;
3806 }
3807
3808 /* Estimate the cost of a memory move of type TYPE. Use the machine
3809 dependent word size, take a possible memcpy call into account, and
3810 return a cost based on whether optimizing for size or speed, according to SPEED_P. */
3811
3812 int
3813 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3814 {
3815 HOST_WIDE_INT size;
3816
3817 gcc_assert (!VOID_TYPE_P (type));
3818
3819 if (TREE_CODE (type) == VECTOR_TYPE)
3820 {
3821 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3822 machine_mode simd
3823 = targetm.vectorize.preferred_simd_mode (inner);
3824 int simd_mode_size = GET_MODE_SIZE (simd);
3825 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3826 / simd_mode_size);
3827 }
3828
3829 size = int_size_in_bytes (type);
3830
3831 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3832 /* Cost of a memcpy call, 3 arguments and the call. */
3833 return 4;
3834 else
3835 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3836 }
3837
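/* Worked examples for estimate_move_cost, assuming (illustratively)
   a target with MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 8:

     4-byte int:       (4 + 8 - 1) / 8  = 1
     64-byte struct:   (64 + 8 - 1) / 8 = 8
     1024-byte struct: exceeds 8 * 8 = 64 bytes, so it is assumed to
                       become a memcpy call and costs 4 (three
                       arguments plus the call itself).  */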
3838 /* Returns the cost of operation CODE, according to WEIGHTS. */
3839
3840 static int
3841 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3842 tree op1 ATTRIBUTE_UNUSED, tree op2)
3843 {
3844 switch (code)
3845 {
3846 /* These are "free" conversions, or their presumed cost
3847 is folded into other operations. */
3848 case RANGE_EXPR:
3849 CASE_CONVERT:
3850 case COMPLEX_EXPR:
3851 case PAREN_EXPR:
3852 case VIEW_CONVERT_EXPR:
3853 return 0;
3854
3855 /* Assign cost of 1 to usual operations.
3856 ??? We may consider mapping RTL costs to this. */
3857 case COND_EXPR:
3858 case VEC_COND_EXPR:
3859 case VEC_PERM_EXPR:
3860
3861 case PLUS_EXPR:
3862 case POINTER_PLUS_EXPR:
3863 case MINUS_EXPR:
3864 case MULT_EXPR:
3865 case MULT_HIGHPART_EXPR:
3866 case FMA_EXPR:
3867
3868 case ADDR_SPACE_CONVERT_EXPR:
3869 case FIXED_CONVERT_EXPR:
3870 case FIX_TRUNC_EXPR:
3871
3872 case NEGATE_EXPR:
3873 case FLOAT_EXPR:
3874 case MIN_EXPR:
3875 case MAX_EXPR:
3876 case ABS_EXPR:
3877
3878 case LSHIFT_EXPR:
3879 case RSHIFT_EXPR:
3880 case LROTATE_EXPR:
3881 case RROTATE_EXPR:
3882
3883 case BIT_IOR_EXPR:
3884 case BIT_XOR_EXPR:
3885 case BIT_AND_EXPR:
3886 case BIT_NOT_EXPR:
3887
3888 case TRUTH_ANDIF_EXPR:
3889 case TRUTH_ORIF_EXPR:
3890 case TRUTH_AND_EXPR:
3891 case TRUTH_OR_EXPR:
3892 case TRUTH_XOR_EXPR:
3893 case TRUTH_NOT_EXPR:
3894
3895 case LT_EXPR:
3896 case LE_EXPR:
3897 case GT_EXPR:
3898 case GE_EXPR:
3899 case EQ_EXPR:
3900 case NE_EXPR:
3901 case ORDERED_EXPR:
3902 case UNORDERED_EXPR:
3903
3904 case UNLT_EXPR:
3905 case UNLE_EXPR:
3906 case UNGT_EXPR:
3907 case UNGE_EXPR:
3908 case UNEQ_EXPR:
3909 case LTGT_EXPR:
3910
3911 case CONJ_EXPR:
3912
3913 case PREDECREMENT_EXPR:
3914 case PREINCREMENT_EXPR:
3915 case POSTDECREMENT_EXPR:
3916 case POSTINCREMENT_EXPR:
3917
3918 case REALIGN_LOAD_EXPR:
3919
3920 case REDUC_MAX_EXPR:
3921 case REDUC_MIN_EXPR:
3922 case REDUC_PLUS_EXPR:
3923 case WIDEN_SUM_EXPR:
3924 case WIDEN_MULT_EXPR:
3925 case DOT_PROD_EXPR:
3926 case SAD_EXPR:
3927 case WIDEN_MULT_PLUS_EXPR:
3928 case WIDEN_MULT_MINUS_EXPR:
3929 case WIDEN_LSHIFT_EXPR:
3930
3931 case VEC_WIDEN_MULT_HI_EXPR:
3932 case VEC_WIDEN_MULT_LO_EXPR:
3933 case VEC_WIDEN_MULT_EVEN_EXPR:
3934 case VEC_WIDEN_MULT_ODD_EXPR:
3935 case VEC_UNPACK_HI_EXPR:
3936 case VEC_UNPACK_LO_EXPR:
3937 case VEC_UNPACK_FLOAT_HI_EXPR:
3938 case VEC_UNPACK_FLOAT_LO_EXPR:
3939 case VEC_PACK_TRUNC_EXPR:
3940 case VEC_PACK_SAT_EXPR:
3941 case VEC_PACK_FIX_TRUNC_EXPR:
3942 case VEC_WIDEN_LSHIFT_HI_EXPR:
3943 case VEC_WIDEN_LSHIFT_LO_EXPR:
3944
3945 return 1;
3946
3947 /* A few special cases of expensive operations. This is useful
3948 to avoid inlining functions that contain too many of these. */
3949 case TRUNC_DIV_EXPR:
3950 case CEIL_DIV_EXPR:
3951 case FLOOR_DIV_EXPR:
3952 case ROUND_DIV_EXPR:
3953 case EXACT_DIV_EXPR:
3954 case TRUNC_MOD_EXPR:
3955 case CEIL_MOD_EXPR:
3956 case FLOOR_MOD_EXPR:
3957 case ROUND_MOD_EXPR:
3958 case RDIV_EXPR:
3959 if (TREE_CODE (op2) != INTEGER_CST)
3960 return weights->div_mod_cost;
3961 return 1;
3962
3963 default:
3964 /* We expect a copy assignment with no operator. */
3965 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3966 return 0;
3967 }
3968 }
3969
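/* Hedged examples for estimate_operator_cost: the PLUS_EXPR in
   "a = b + c" costs 1; the TRUNC_DIV_EXPR in "a = b / c" with a
   non-constant divisor costs weights->div_mod_cost (10 in the
   time-based weights initialized below); "a = b / 4" has an
   INTEGER_CST divisor, which is expected to expand into cheap
   shift/add sequences, so it costs 1 again.  */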
3970
3971 /* Estimate number of instructions that will be created by expanding
3972 the statements in the statement sequence STMTS.
3973 WEIGHTS contains weights attributed to various constructs. */
3974
3975 static int
3976 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3977 {
3978 int cost;
3979 gimple_stmt_iterator gsi;
3980
3981 cost = 0;
3982 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3983 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3984
3985 return cost;
3986 }
3987
3988
3989 /* Estimate number of instructions that will be created by expanding STMT.
3990 WEIGHTS contains weights attributed to various constructs. */
3991
3992 int
3993 estimate_num_insns (gimple stmt, eni_weights *weights)
3994 {
3995 unsigned cost, i;
3996 enum gimple_code code = gimple_code (stmt);
3997 tree lhs;
3998 tree rhs;
3999
4000 switch (code)
4001 {
4002 case GIMPLE_ASSIGN:
4003 /* Try to estimate the cost of assignments. We have two cases to
4004 deal with:
4005 1) Simple assignments to registers;
4006 2) Stores to things that must live in memory. This includes
4007 "normal" stores to scalars, but also assignments of large
4008 structures, or constructors of big arrays;
4009
4010 Let us look at these two cases, assuming we have "a = b + C":
4011 <GIMPLE_ASSIGN <var_decl "a">
4012 <plus_expr <var_decl "b"> <constant C>>
4013 If "a" is a GIMPLE register, the assignment to it is free on almost
4014 any target, because "a" usually ends up in a real register. Hence
4015 the only cost of this expression comes from the PLUS_EXPR, and we
4016 can ignore the GIMPLE_ASSIGN.
4017 If "a" is not a GIMPLE register, the assignment to "a" will most
4018 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4019 of moving something into "a", which we compute using the function
4020 estimate_move_cost. */
4021 if (gimple_clobber_p (stmt))
4022 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4023
4024 lhs = gimple_assign_lhs (stmt);
4025 rhs = gimple_assign_rhs1 (stmt);
4026
4027 cost = 0;
4028
4029 /* Account for the cost of moving to / from memory. */
4030 if (gimple_store_p (stmt))
4031 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4032 if (gimple_assign_load_p (stmt))
4033 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4034
4035 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4036 gimple_assign_rhs1 (stmt),
4037 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4038 == GIMPLE_BINARY_RHS
4039 ? gimple_assign_rhs2 (stmt) : NULL);
4040 break;
4041
4042 case GIMPLE_COND:
4043 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4044 gimple_op (stmt, 0),
4045 gimple_op (stmt, 1));
4046 break;
4047
4048 case GIMPLE_SWITCH:
4049 {
4050 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4051 /* Take into account cost of the switch + guess 2 conditional jumps for
4052 each case label.
4053
4054 TODO: once the switch expansion logic is sufficiently separated, we can
4055 do a better job of estimating the cost of the switch. */
4056 if (weights->time_based)
4057 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4058 else
4059 cost = gimple_switch_num_labels (switch_stmt) * 2;
4060 }
4061 break;
4062
4063 case GIMPLE_CALL:
4064 {
4065 tree decl;
4066
4067 if (gimple_call_internal_p (stmt))
4068 return 0;
4069 else if ((decl = gimple_call_fndecl (stmt))
4070 && DECL_BUILT_IN (decl))
4071 {
4072 /* Do not special case builtins where we see the body.
4073 This just confuses the inliner. */
4074 struct cgraph_node *node;
4075 if (!(node = cgraph_node::get (decl))
4076 || node->definition)
4077 ;
4078 /* For builtins that are likely expanded to nothing or
4079 inlined, do not account operand costs. */
4080 else if (is_simple_builtin (decl))
4081 return 0;
4082 else if (is_inexpensive_builtin (decl))
4083 return weights->target_builtin_call_cost;
4084 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
4085 {
4086 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4087 specialize the cheap expansion we do here.
4088 ??? This asks for a more general solution. */
4089 switch (DECL_FUNCTION_CODE (decl))
4090 {
4091 case BUILT_IN_POW:
4092 case BUILT_IN_POWF:
4093 case BUILT_IN_POWL:
4094 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4095 && REAL_VALUES_EQUAL
4096 (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
4097 return estimate_operator_cost
4098 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4099 gimple_call_arg (stmt, 0));
4100 break;
4101
4102 default:
4103 break;
4104 }
4105 }
4106 }
4107
4108 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4109 if (gimple_call_lhs (stmt))
4110 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4111 weights->time_based);
4112 for (i = 0; i < gimple_call_num_args (stmt); i++)
4113 {
4114 tree arg = gimple_call_arg (stmt, i);
4115 cost += estimate_move_cost (TREE_TYPE (arg),
4116 weights->time_based);
4117 }
4118 break;
4119 }
4120
4121 case GIMPLE_RETURN:
4122 return weights->return_cost;
4123
4124 case GIMPLE_GOTO:
4125 case GIMPLE_LABEL:
4126 case GIMPLE_NOP:
4127 case GIMPLE_PHI:
4128 case GIMPLE_PREDICT:
4129 case GIMPLE_DEBUG:
4130 return 0;
4131
4132 case GIMPLE_ASM:
4133 {
4134 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4135 /* 1000 means infinity. This avoids overflows later
4136 with very long asm statements. */
4137 if (count > 1000)
4138 count = 1000;
4139 return count;
4140 }
4141
4142 case GIMPLE_RESX:
4143 /* This is either going to be an external function call with one
4144 argument, or two register copy statements plus a goto. */
4145 return 2;
4146
4147 case GIMPLE_EH_DISPATCH:
4148 /* ??? This is going to turn into a switch statement. Ideally
4149 we'd have a look at the eh region and estimate the number of
4150 edges involved. */
4151 return 10;
4152
4153 case GIMPLE_BIND:
4154 return estimate_num_insns_seq (
4155 gimple_bind_body (as_a <gbind *> (stmt)),
4156 weights);
4157
4158 case GIMPLE_EH_FILTER:
4159 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4160
4161 case GIMPLE_CATCH:
4162 return estimate_num_insns_seq (gimple_catch_handler (
4163 as_a <gcatch *> (stmt)),
4164 weights);
4165
4166 case GIMPLE_TRY:
4167 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4168 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4169
4170 /* OMP directives are generally very expensive. */
4171
4172 case GIMPLE_OMP_RETURN:
4173 case GIMPLE_OMP_SECTIONS_SWITCH:
4174 case GIMPLE_OMP_ATOMIC_STORE:
4175 case GIMPLE_OMP_CONTINUE:
4176 /* ...except these, which are cheap. */
4177 return 0;
4178
4179 case GIMPLE_OMP_ATOMIC_LOAD:
4180 return weights->omp_cost;
4181
4182 case GIMPLE_OMP_FOR:
4183 return (weights->omp_cost
4184 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4185 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4186
4187 case GIMPLE_OMP_PARALLEL:
4188 case GIMPLE_OMP_TASK:
4189 case GIMPLE_OMP_CRITICAL:
4190 case GIMPLE_OMP_MASTER:
4191 case GIMPLE_OMP_TASKGROUP:
4192 case GIMPLE_OMP_ORDERED:
4193 case GIMPLE_OMP_SECTION:
4194 case GIMPLE_OMP_SECTIONS:
4195 case GIMPLE_OMP_SINGLE:
4196 case GIMPLE_OMP_TARGET:
4197 case GIMPLE_OMP_TEAMS:
4198 return (weights->omp_cost
4199 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4200
4201 case GIMPLE_TRANSACTION:
4202 return (weights->tm_cost
4203 + estimate_num_insns_seq (gimple_transaction_body (
4204 as_a <gtransaction *> (stmt)),
4205 weights));
4206
4207 default:
4208 gcc_unreachable ();
4209 }
4210
4211 return cost;
4212 }
4213
4214 /* Estimate number of instructions that will be created by expanding
4215 function FNDECL. WEIGHTS contains weights attributed to various
4216 constructs. */
4217
4218 int
4219 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4220 {
4221 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4222 gimple_stmt_iterator bsi;
4223 basic_block bb;
4224 int n = 0;
4225
4226 gcc_assert (my_function && my_function->cfg);
4227 FOR_EACH_BB_FN (bb, my_function)
4228 {
4229 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4230 n += estimate_num_insns (gsi_stmt (bsi), weights);
4231 }
4232
4233 return n;
4234 }
4235
4236
4237 /* Initializes weights used by estimate_num_insns. */
4238
4239 void
4240 init_inline_once (void)
4241 {
4242 eni_size_weights.call_cost = 1;
4243 eni_size_weights.indirect_call_cost = 3;
4244 eni_size_weights.target_builtin_call_cost = 1;
4245 eni_size_weights.div_mod_cost = 1;
4246 eni_size_weights.omp_cost = 40;
4247 eni_size_weights.tm_cost = 10;
4248 eni_size_weights.time_based = false;
4249 eni_size_weights.return_cost = 1;
4250
4251 /* Estimating time for call is difficult, since we have no idea what the
4252 called function does. In the current uses of eni_time_weights,
4253 underestimating the cost does less harm than overestimating it, so
4254 we choose a rather small value here. */
4255 eni_time_weights.call_cost = 10;
4256 eni_time_weights.indirect_call_cost = 15;
4257 eni_time_weights.target_builtin_call_cost = 1;
4258 eni_time_weights.div_mod_cost = 10;
4259 eni_time_weights.omp_cost = 40;
4260 eni_time_weights.tm_cost = 40;
4261 eni_time_weights.time_based = true;
4262 eni_time_weights.return_cost = 2;
4263 }
4264
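/* A worked example under the weights above, assuming scalar ints
   whose moves each cost 1 (see estimate_move_cost).  For the direct
   call

     x = f (y);

   estimate_num_insns charges the call itself, one move for the
   returned lhs, and one move per argument:

     size estimate:  1 + 1 + 1 = 3
     time estimate: 10 + 1 + 1 = 12

   The indirect call "x = (*p) (y)" uses indirect_call_cost instead:
   3 + 1 + 1 = 5 for size and 15 + 1 + 1 = 17 for time.  */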
4265 /* Estimate the number of instructions in a gimple_seq. */
4266
4267 int
4268 count_insns_seq (gimple_seq seq, eni_weights *weights)
4269 {
4270 gimple_stmt_iterator gsi;
4271 int n = 0;
4272 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
4273 n += estimate_num_insns (gsi_stmt (gsi), weights);
4274
4275 return n;
4276 }
4277
4278
4279 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4280
4281 static void
4282 prepend_lexical_block (tree current_block, tree new_block)
4283 {
4284 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4285 BLOCK_SUBBLOCKS (current_block) = new_block;
4286 BLOCK_SUPERCONTEXT (new_block) = current_block;
4287 }
4288
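/* Illustration: if CURRENT_BLOCK already has subblocks B1 and B2,
   prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) leaves

     BLOCK_SUBBLOCKS (CURRENT_BLOCK):  NEW_BLOCK -> B1 -> B2

   with BLOCK_SUPERCONTEXT (NEW_BLOCK) == CURRENT_BLOCK.  This is how
   expand_call_inline hangs the callee's remapped block tree under
   the block of the call statement.  */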
4289 /* Add local variables from CALLEE to CALLER. */
4290
4291 static inline void
4292 add_local_variables (struct function *callee, struct function *caller,
4293 copy_body_data *id)
4294 {
4295 tree var;
4296 unsigned ix;
4297
4298 FOR_EACH_LOCAL_DECL (callee, ix, var)
4299 if (!can_be_nonlocal (var, id))
4300 {
4301 tree new_var = remap_decl (var, id);
4302
4303 /* Remap debug-expressions. */
4304 if (TREE_CODE (new_var) == VAR_DECL
4305 && DECL_HAS_DEBUG_EXPR_P (var)
4306 && new_var != var)
4307 {
4308 tree tem = DECL_DEBUG_EXPR (var);
4309 bool old_regimplify = id->regimplify;
4310 id->remapping_type_depth++;
4311 walk_tree (&tem, copy_tree_body_r, id, NULL);
4312 id->remapping_type_depth--;
4313 id->regimplify = old_regimplify;
4314 SET_DECL_DEBUG_EXPR (new_var, tem);
4315 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4316 }
4317 add_local_decl (caller, new_var);
4318 }
4319 }
4320
4321 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4322 have brought in or introduced any debug stmts for SRCVAR. */
4323
4324 static inline void
4325 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4326 {
4327 tree *remappedvarp = id->decl_map->get (srcvar);
4328
4329 if (!remappedvarp)
4330 return;
4331
4332 if (TREE_CODE (*remappedvarp) != VAR_DECL)
4333 return;
4334
4335 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4336 return;
4337
4338 tree tvar = target_for_debug_bind (*remappedvarp);
4339 if (!tvar)
4340 return;
4341
4342 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4343 id->call_stmt);
4344 gimple_seq_add_stmt (bindings, stmt);
4345 }
4346
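/* In dump form each reset emitted above appears as

     # DEBUG remapped_var => NULL

   at the point where control returns to the caller, telling
   var-tracking that the inlined variable's live range ends there.  */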
4347 /* For each inlined variable for which we may have debug bind stmts,
4348 add before GSI a final debug stmt resetting it, marking the end of
4349 its life, so that var-tracking knows it doesn't have to compute
4350 further locations for it. */
4351
4352 static inline void
4353 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4354 {
4355 tree var;
4356 unsigned ix;
4357 gimple_seq bindings = NULL;
4358
4359 if (!gimple_in_ssa_p (id->src_cfun))
4360 return;
4361
4362 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4363 return;
4364
4365 for (var = DECL_ARGUMENTS (id->src_fn);
4366 var; var = DECL_CHAIN (var))
4367 reset_debug_binding (id, var, &bindings);
4368
4369 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4370 reset_debug_binding (id, var, &bindings);
4371
4372 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4373 }
4374
4375 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4376
4377 static bool
4378 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
4379 {
4380 tree use_retvar;
4381 tree fn;
4382 hash_map<tree, tree> *dst;
4383 hash_map<tree, tree> *st = NULL;
4384 tree return_slot;
4385 tree modify_dest;
4386 tree return_bounds = NULL;
4387 location_t saved_location;
4388 struct cgraph_edge *cg_edge;
4389 cgraph_inline_failed_t reason;
4390 basic_block return_block;
4391 edge e;
4392 gimple_stmt_iterator gsi, stmt_gsi;
4393 bool successfully_inlined = FALSE;
4394 bool purge_dead_abnormal_edges;
4395 gcall *call_stmt;
4396 unsigned int i;
4397
4398 /* Set input_location here so we get the right instantiation context
4399 if we call instantiate_decl from inlinable_function_p. */
4400 /* FIXME: instantiate_decl isn't called by inlinable_function_p. */
4401 saved_location = input_location;
4402 input_location = gimple_location (stmt);
4403
4404 /* From here on, we're only interested in CALL_EXPRs. */
4405 call_stmt = dyn_cast <gcall *> (stmt);
4406 if (!call_stmt)
4407 goto egress;
4408
4409 cg_edge = id->dst_node->get_edge (stmt);
4410 gcc_checking_assert (cg_edge);
4411 /* First, see if we can figure out what function is being called.
4412 If we cannot, then there is no hope of inlining the function. */
4413 if (cg_edge->indirect_unknown_callee)
4414 goto egress;
4415 fn = cg_edge->callee->decl;
4416 gcc_checking_assert (fn);
4417
4418 /* If FN is a declaration of a function in a nested scope that was
4419 globally declared inline, we don't set its DECL_INITIAL.
4420 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4421 C++ front-end uses it for cdtors to refer to their internal
4422 declarations, that are not real functions. Fortunately those
4423 don't have trees to be saved, so we can tell by checking their
4424 gimple_body. */
4425 if (!DECL_INITIAL (fn)
4426 && DECL_ABSTRACT_ORIGIN (fn)
4427 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4428 fn = DECL_ABSTRACT_ORIGIN (fn);
4429
4430 /* Don't try to inline functions that are not well-suited to inlining. */
4431 if (cg_edge->inline_failed)
4432 {
4433 reason = cg_edge->inline_failed;
4434 /* If this call was originally indirect, we do not want to emit any
4435 inlining related warnings or sorry messages because there are no
4436 guarantees regarding those. */
4437 if (cg_edge->indirect_inlining_edge)
4438 goto egress;
4439
4440 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4441 /* For extern inline functions that get redefined we have always
4442 silently ignored the always_inline flag. Better behaviour would
4443 be to keep both bodies and use the extern inline body
4444 for inlining, but we can't do that because frontends overwrite
4445 the body. */
4446 && !cg_edge->callee->local.redefined_extern_inline
4447 /* During early inline pass, report only when optimization is
4448 not turned on. */
4449 && (symtab->global_info_ready
4450 || !optimize
4451 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4452 /* PR 20090218-1_0.c. Body can be provided by another module. */
4453 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4454 {
4455 error ("inlining failed in call to always_inline %q+F: %s", fn,
4456 cgraph_inline_failed_string (reason));
4457 error ("called from here");
4458 }
4459 else if (warn_inline
4460 && DECL_DECLARED_INLINE_P (fn)
4461 && !DECL_NO_INLINE_WARNING_P (fn)
4462 && !DECL_IN_SYSTEM_HEADER (fn)
4463 && reason != CIF_UNSPECIFIED
4464 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4465 /* Do not warn about not inlined recursive calls. */
4466 && !cg_edge->recursive_p ()
4467 /* Avoid warnings during early inline pass. */
4468 && symtab->global_info_ready)
4469 {
4470 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4471 fn, _(cgraph_inline_failed_string (reason)));
4472 warning (OPT_Winline, "called from here");
4473 }
4474 goto egress;
4475 }
4476 fn = cg_edge->callee->decl;
4477 cg_edge->callee->get_untransformed_body ();
4478
4479 #ifdef ENABLE_CHECKING
4480 if (cg_edge->callee->decl != id->dst_node->decl)
4481 cg_edge->callee->verify ();
4482 #endif
4483
4484 /* We will be inlining this callee. */
4485 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4486 id->assign_stmts.create (0);
4487
4488 /* Update the callers EH personality. */
4489 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4490 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4491 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4492
4493 /* Split the block holding the GIMPLE_CALL. */
4494 e = split_block (bb, stmt);
4495 bb = e->src;
4496 return_block = e->dest;
4497 remove_edge (e);
4498
4499 /* split_block splits after the statement; work around this by
4500 moving the call into the second block manually. Not pretty,
4501 but seems easier than doing the CFG manipulation by hand
4502 when the GIMPLE_CALL is in the last statement of BB. */
4503 stmt_gsi = gsi_last_bb (bb);
4504 gsi_remove (&stmt_gsi, false);
4505
4506 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4507 been the source of abnormal edges. In this case, schedule
4508 the removal of dead abnormal edges. */
4509 gsi = gsi_start_bb (return_block);
4510 if (gsi_end_p (gsi))
4511 {
4512 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4513 purge_dead_abnormal_edges = true;
4514 }
4515 else
4516 {
4517 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4518 purge_dead_abnormal_edges = false;
4519 }
4520
4521 stmt_gsi = gsi_start_bb (return_block);
4522
4523 /* Build a block containing code to initialize the arguments, the
4524 actual inline expansion of the body, and a label for the return
4525 statements within the function to jump to. The type of the
4526 statement expression is the return type of the function call.
4527 ??? If the call does not have an associated block then we will
4528 remap all callee blocks to NULL, effectively dropping most of
4529 its debug information. This should only happen for calls to
4530 artificial decls inserted by the compiler itself. We need to
4531 either link the inlined blocks into the caller block tree or
4532 not refer to them in any way to not break GC for locations. */
4533 if (gimple_block (stmt))
4534 {
4535 id->block = make_node (BLOCK);
4536 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4537 BLOCK_SOURCE_LOCATION (id->block) = LOCATION_LOCUS (input_location);
4538 prepend_lexical_block (gimple_block (stmt), id->block);
4539 }
4540
4541 /* Local declarations will be replaced by their equivalents in this
4542 map. */
4543 st = id->decl_map;
4544 id->decl_map = new hash_map<tree, tree>;
4545 dst = id->debug_map;
4546 id->debug_map = NULL;
4547
4548 /* Record the function we are about to inline. */
4549 id->src_fn = fn;
4550 id->src_node = cg_edge->callee;
4551 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4552 id->call_stmt = stmt;
4553
4554 /* If the src function contains an IFN_VA_ARG, then so will the dst
4555 function after inlining. */
4556 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4557 {
4558 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4559 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4560 }
4561
4562 gcc_assert (!id->src_cfun->after_inlining);
4563
4564 id->entry_bb = bb;
4565 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4566 {
4567 gimple_stmt_iterator si = gsi_last_bb (bb);
4568 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4569 NOT_TAKEN),
4570 GSI_NEW_STMT);
4571 }
4572 initialize_inlined_parameters (id, stmt, fn, bb);
4573
4574 if (DECL_INITIAL (fn))
4575 {
4576 if (gimple_block (stmt))
4577 {
4578 tree *var;
4579
4580 prepend_lexical_block (id->block,
4581 remap_blocks (DECL_INITIAL (fn), id));
4582 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4583 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4584 == NULL_TREE));
4585 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4586 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4587 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4588 under it. The parameters can be then evaluated in the debugger,
4589 but don't show in backtraces. */
4590 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4591 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4592 {
4593 tree v = *var;
4594 *var = TREE_CHAIN (v);
4595 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4596 BLOCK_VARS (id->block) = v;
4597 }
4598 else
4599 var = &TREE_CHAIN (*var);
4600 }
4601 else
4602 remap_blocks_to_null (DECL_INITIAL (fn), id);
4603 }
4604
4605 /* Return statements in the function body will be replaced by jumps
4606 to the RET_LABEL. */
4607 gcc_assert (DECL_INITIAL (fn));
4608 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4609
4610 /* Find the LHS to which the result of this call is assigned. */
4611 return_slot = NULL;
4612 if (gimple_call_lhs (stmt))
4613 {
4614 modify_dest = gimple_call_lhs (stmt);
4615
4616 /* Remember where to copy returned bounds. */
4617 if (gimple_call_with_bounds_p (stmt)
4618 && TREE_CODE (modify_dest) == SSA_NAME)
4619 {
4620 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4621 if (retbnd)
4622 {
4623 return_bounds = gimple_call_lhs (retbnd);
4624 /* If returned bounds are not used then just
4625 remove unused call. */
4626 if (!return_bounds)
4627 {
4628 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4629 gsi_remove (&iter, true);
4630 }
4631 }
4632 }
4633
4634 /* The function which we are inlining might not return a value,
4635 in which case we should issue a warning that the function
4636 does not return a value. In that case the optimizers will
4637 see that the variable to which the value is assigned was not
4638 initialized. We do not want to issue a warning about that
4639 uninitialized variable. */
4640 if (DECL_P (modify_dest))
4641 TREE_NO_WARNING (modify_dest) = 1;
4642
4643 if (gimple_call_return_slot_opt_p (call_stmt))
4644 {
4645 return_slot = modify_dest;
4646 modify_dest = NULL;
4647 }
4648 }
4649 else
4650 modify_dest = NULL;
4651
4652 /* If we are inlining a call to the C++ operator new, we don't want
4653 to use type based alias analysis on the return value. Otherwise
4654 we may get confused if the compiler sees that the inlined new
4655 function returns a pointer which was just deleted. See bug
4656 33407. */
4657 if (DECL_IS_OPERATOR_NEW (fn))
4658 {
4659 return_slot = NULL;
4660 modify_dest = NULL;
4661 }
4662
4663 /* Declare the return variable for the function. */
4664 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4665 return_bounds, bb);
4666
4667 /* Add local vars in this inlined callee to caller. */
4668 add_local_variables (id->src_cfun, cfun, id);
4669
4670 if (dump_file && (dump_flags & TDF_DETAILS))
4671 {
4672 fprintf (dump_file, "Inlining ");
4673 print_generic_expr (dump_file, id->src_fn, 0);
4674 fprintf (dump_file, " to ");
4675 print_generic_expr (dump_file, id->dst_fn, 0);
4676 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4677 }
4678
4679 /* This is it. Duplicate the callee body. Assume callee is
4680 pre-gimplified. Note that we must not alter the caller
4681 function in any way before this point, as this CALL_EXPR may be
4682 a self-referential call; if we're calling ourselves, we need to
4683 duplicate our body before altering anything. */
4684 copy_body (id, cg_edge->callee->count,
4685 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4686 bb, return_block, NULL);
4687
4688 reset_debug_bindings (id, stmt_gsi);
4689
4690 /* Reset the escaped solution. */
4691 if (cfun->gimple_df)
4692 pt_solution_reset (&cfun->gimple_df->escaped);
4693
4694 /* Clean up. */
4695 if (id->debug_map)
4696 {
4697 delete id->debug_map;
4698 id->debug_map = dst;
4699 }
4700 delete id->decl_map;
4701 id->decl_map = st;
4702
4703 /* Unlink the call's virtual operands before replacing it. */
4704 unlink_stmt_vdef (stmt);
4705 if (gimple_vdef (stmt)
4706 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4707 release_ssa_name (gimple_vdef (stmt));
4708
4709 /* If the inlined function returns a result that we care about,
4710 substitute the GIMPLE_CALL with an assignment of the return
4711 variable to the LHS of the call. That is, if STMT was
4712 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4713 if (use_retvar && gimple_call_lhs (stmt))
4714 {
4715 gimple old_stmt = stmt;
4716 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4717 gsi_replace (&stmt_gsi, stmt, false);
4718 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4719
4720 /* Copy bounds if we copy structure with bounds. */
4721 if (chkp_function_instrumented_p (id->dst_fn)
4722 && !BOUNDED_P (use_retvar)
4723 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4724 id->assign_stmts.safe_push (stmt);
4725 }
4726 else
4727 {
4728 /* Handle the case of inlining a function with no return
4729 statement, which causes the return value to become undefined. */
4730 if (gimple_call_lhs (stmt)
4731 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4732 {
4733 tree name = gimple_call_lhs (stmt);
4734 tree var = SSA_NAME_VAR (name);
4735 tree def = ssa_default_def (cfun, var);
4736
4737 if (def)
4738 {
4739 /* If the variable is used undefined, make this name
4740 undefined via a move. */
4741 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4742 gsi_replace (&stmt_gsi, stmt, true);
4743 }
4744 else
4745 {
4746 /* Otherwise make this variable undefined. */
4747 gsi_remove (&stmt_gsi, true);
4748 set_ssa_default_def (cfun, var, name);
4749 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4750 }
4751 }
4752 else
4753 gsi_remove (&stmt_gsi, true);
4754 }
4755
4756 /* Put returned bounds into the correct place if required. */
4757 if (return_bounds)
4758 {
4759 gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4760 gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4761 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4762 unlink_stmt_vdef (old_stmt);
4763 gsi_replace (&bnd_gsi, new_stmt, false);
4764 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4765 cgraph_update_edges_for_call_stmt (old_stmt,
4766 gimple_call_fndecl (old_stmt),
4767 new_stmt);
4768 }
4769
4770 if (purge_dead_abnormal_edges)
4771 {
4772 gimple_purge_dead_eh_edges (return_block);
4773 gimple_purge_dead_abnormal_call_edges (return_block);
4774 }
4775
4776 /* If the value of the new expression is ignored, that's OK. We
4777 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4778 the equivalent inlined version either. */
4779 if (is_gimple_assign (stmt))
4780 {
4781 gcc_assert (gimple_assign_single_p (stmt)
4782 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4783 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4784 }
4785
4786 /* Copy bounds for all generated assigns that need it. */
4787 for (i = 0; i < id->assign_stmts.length (); i++)
4788 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4789 id->assign_stmts.release ();
4790
4791 /* Output the inlining info for this abstract function, since it has been
4792 inlined. If we don't do this now, we can lose the information about the
4793 variables in the function when the blocks get blown away as soon as we
4794 remove the cgraph node. */
4795 if (gimple_block (stmt))
4796 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4797
4798 /* Update callgraph if needed. */
4799 cg_edge->callee->remove ();
4800
4801 id->block = NULL_TREE;
4802 successfully_inlined = TRUE;
4803
4804 egress:
4805 input_location = saved_location;
4806 return successfully_inlined;
4807 }
4808
4809 /* Expand call statements reachable from STMT_P.
4810 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4811 in a MODIFY_EXPR. */
4812
4813 static bool
4814 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4815 {
4816 gimple_stmt_iterator gsi;
4817 bool inlined = false;
4818
4819 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4820 {
4821 gimple stmt = gsi_stmt (gsi);
4822 gsi_prev (&gsi);
4823
4824 if (is_gimple_call (stmt)
4825 && !gimple_call_internal_p (stmt))
4826 inlined |= expand_call_inline (bb, stmt, id);
4827 }
4828
4829 return inlined;
4830 }
4831
4832
4833 /* Walk all basic blocks created after FIRST and try to fold every statement
4834 in the STATEMENTS pointer set. */
4835
4836 static void
4837 fold_marked_statements (int first, hash_set<gimple> *statements)
4838 {
4839 for (; first < n_basic_blocks_for_fn (cfun); first++)
4840 if (BASIC_BLOCK_FOR_FN (cfun, first))
4841 {
4842 gimple_stmt_iterator gsi;
4843
4844 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4845 !gsi_end_p (gsi);
4846 gsi_next (&gsi))
4847 if (statements->contains (gsi_stmt (gsi)))
4848 {
4849 gimple old_stmt = gsi_stmt (gsi);
4850 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4851
4852 if (old_decl && DECL_BUILT_IN (old_decl))
4853 {
4854 /* Folding builtins can create multiple instructions,
4855 we need to look at all of them. */
4856 gimple_stmt_iterator i2 = gsi;
4857 gsi_prev (&i2);
4858 if (fold_stmt (&gsi))
4859 {
4860 gimple new_stmt;
4861 /* If a builtin at the end of a bb folded into nothing,
4862 the following loop won't work. */
4863 if (gsi_end_p (gsi))
4864 {
4865 cgraph_update_edges_for_call_stmt (old_stmt,
4866 old_decl, NULL);
4867 break;
4868 }
4869 if (gsi_end_p (i2))
4870 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4871 else
4872 gsi_next (&i2);
4873 while (1)
4874 {
4875 new_stmt = gsi_stmt (i2);
4876 update_stmt (new_stmt);
4877 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4878 new_stmt);
4879
4880 if (new_stmt == gsi_stmt (gsi))
4881 {
4882 /* It is okay to check only for the very last
4883 of these statements. If it is a throwing
4884 statement nothing will change. If it isn't
4885 this can remove EH edges. That would only be
4886 incorrect if some intermediate stmts could
4887 throw but not the last one. That would mean
4888 we'd have to split the block, which we can't
4889 do here, and we'd lose anyway. And as builtins
4890 probably never throw, this all
4891 is moot anyway. */
4892 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4893 new_stmt))
4894 gimple_purge_dead_eh_edges (
4895 BASIC_BLOCK_FOR_FN (cfun, first));
4896 break;
4897 }
4898 gsi_next (&i2);
4899 }
4900 }
4901 }
4902 else if (fold_stmt (&gsi))
4903 {
4904 /* Re-read the statement from GSI as fold_stmt() may
4905 have changed it. */
4906 gimple new_stmt = gsi_stmt (gsi);
4907 update_stmt (new_stmt);
4908
4909 if (is_gimple_call (old_stmt)
4910 || is_gimple_call (new_stmt))
4911 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4912 new_stmt);
4913
4914 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4915 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4916 first));
4917 }
4918 }
4919 }
4920 }
4921
4922 /* Expand calls to inline functions in the body of FN. */
4923
4924 unsigned int
4925 optimize_inline_calls (tree fn)
4926 {
4927 copy_body_data id;
4928 basic_block bb;
4929 int last = n_basic_blocks_for_fn (cfun);
4930 bool inlined_p = false;
4931
4932 /* Clear out ID. */
4933 memset (&id, 0, sizeof (id));
4934
4935 id.src_node = id.dst_node = cgraph_node::get (fn);
4936 gcc_assert (id.dst_node->definition);
4937 id.dst_fn = fn;
4938 /* Or any functions that aren't finished yet. */
4939 if (current_function_decl)
4940 id.dst_fn = current_function_decl;
4941
4942 id.copy_decl = copy_decl_maybe_to_var;
4943 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4944 id.transform_new_cfg = false;
4945 id.transform_return_to_modify = true;
4946 id.transform_parameter = true;
4947 id.transform_lang_insert_block = NULL;
4948 id.statements_to_fold = new hash_set<gimple>;
4949
4950 push_gimplify_context ();
4951
4952 /* We make no attempts to keep dominance info up-to-date. */
4953 free_dominance_info (CDI_DOMINATORS);
4954 free_dominance_info (CDI_POST_DOMINATORS);
4955
4956 /* Register specific gimple functions. */
4957 gimple_register_cfg_hooks ();
4958
4959 /* Reach the trees by walking over the CFG, and note the
4960 enclosing basic-blocks in the call edges. */
4961 /* We walk the blocks going forward, because inlined function bodies
4962 will split id->current_basic_block, and the new blocks will
4963 follow it; we'll trudge through them, processing their CALL_EXPRs
4964 along the way. */
4965 FOR_EACH_BB_FN (bb, cfun)
4966 inlined_p |= gimple_expand_calls_inline (bb, &id);
4967
4968 pop_gimplify_context (NULL);
4969
4970 #ifdef ENABLE_CHECKING
4971 {
4972 struct cgraph_edge *e;
4973
4974 id.dst_node->verify ();
4975
4976 /* Double check that we inlined everything we are supposed to inline. */
4977 for (e = id.dst_node->callees; e; e = e->next_callee)
4978 gcc_assert (e->inline_failed);
4979 }
4980 #endif
4981
4982 /* Fold queued statements. */
4983 fold_marked_statements (last, id.statements_to_fold);
4984 delete id.statements_to_fold;
4985
4986 gcc_assert (!id.debug_stmts.exists ());
4987
4988 /* If we didn't inline into the function there is nothing to do. */
4989 if (!inlined_p)
4990 return 0;
4991
4992 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4993 number_blocks (fn);
4994
4995 delete_unreachable_blocks_update_callgraph (&id);
4996 #ifdef ENABLE_CHECKING
4997 id.dst_node->verify ();
4998 #endif
4999
5000 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5001 not possible yet - the IPA passes might make various functions not
5002 throw, and they don't care to proactively update local EH info. This is
5003 done later in the fixup_cfg pass, which also executes the verification. */
5004 return (TODO_update_ssa
5005 | TODO_cleanup_cfg
5006 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5007 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5008 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5009 ? TODO_rebuild_frequencies : 0));
5010 }
5011
5012 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5013
5014 tree
5015 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5016 {
5017 enum tree_code code = TREE_CODE (*tp);
5018 enum tree_code_class cl = TREE_CODE_CLASS (code);
5019
5020 /* We make copies of most nodes. */
5021 if (IS_EXPR_CODE_CLASS (cl)
5022 || code == TREE_LIST
5023 || code == TREE_VEC
5024 || code == TYPE_DECL
5025 || code == OMP_CLAUSE)
5026 {
5027 /* Because the chain gets clobbered when we make a copy, we save it
5028 here. */
5029 tree chain = NULL_TREE, new_tree;
5030
5031 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5032 chain = TREE_CHAIN (*tp);
5033
5034 /* Copy the node. */
5035 new_tree = copy_node (*tp);
5036
5037 *tp = new_tree;
5038
5039 /* Now, restore the chain, if appropriate. That will cause
5040 walk_tree to walk into the chain as well. */
5041 if (code == PARM_DECL
5042 || code == TREE_LIST
5043 || code == OMP_CLAUSE)
5044 TREE_CHAIN (*tp) = chain;
5045
5046 /* For now, we don't update BLOCKs when we make copies. So, we
5047 have to nullify all BIND_EXPRs. */
5048 if (TREE_CODE (*tp) == BIND_EXPR)
5049 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5050 }
5051 else if (code == CONSTRUCTOR)
5052 {
5053 /* CONSTRUCTOR nodes need special handling because
5054 we need to duplicate the vector of elements. */
5055 tree new_tree;
5056
5057 new_tree = copy_node (*tp);
5058 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5059 *tp = new_tree;
5060 }
5061 else if (code == STATEMENT_LIST)
5062 /* We used to just abort on STATEMENT_LIST, but we can run into them
5063 with statement-expressions (c++/40975). */
5064 copy_statement_list (tp);
5065 else if (TREE_CODE_CLASS (code) == tcc_type)
5066 *walk_subtrees = 0;
5067 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5068 *walk_subtrees = 0;
5069 else if (TREE_CODE_CLASS (code) == tcc_constant)
5070 *walk_subtrees = 0;
5071 return NULL_TREE;
5072 }
5073
5074 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5075 information indicating to what new SAVE_EXPR this one should be mapped,
5076 use that one. Otherwise, create a new node and enter it in ST. FN is
5077 the function into which the copy will be placed. */
5078
5079 static void
5080 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5081 {
5082 tree *n;
5083 tree t;
5084
5085 /* See if we already encountered this SAVE_EXPR. */
5086 n = st->get (*tp);
5087
5088 /* If we didn't already remap this SAVE_EXPR, do so now. */
5089 if (!n)
5090 {
5091 t = copy_node (*tp);
5092
5093 /* Remember this SAVE_EXPR. */
5094 st->put (*tp, t);
5095 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5096 st->put (t, t);
5097 }
5098 else
5099 {
5100 /* We've already walked into this SAVE_EXPR; don't do it again. */
5101 *walk_subtrees = 0;
5102 t = *n;
5103 }
5104
5105 /* Replace this SAVE_EXPR with the copy. */
5106 *tp = t;
5107 }
5108
5109 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5110 label, copies the declaration and enters it in the splay_tree in DATA (which
5111 is really a 'copy_body_data *'). */
5112
5113 static tree
5114 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5115 bool *handled_ops_p ATTRIBUTE_UNUSED,
5116 struct walk_stmt_info *wi)
5117 {
5118 copy_body_data *id = (copy_body_data *) wi->info;
5119 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5120
5121 if (stmt)
5122 {
5123 tree decl = gimple_label_label (stmt);
5124
5125 /* Copy the decl and remember the copy. */
5126 insert_decl_map (id, decl, id->copy_decl (decl, id));
5127 }
5128
5129 return NULL_TREE;
5130 }
5131
5132
5133 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5134 Using the decl_map hash_map of the copy_body_data passed in DATA,
5135 remaps all local declarations to appropriate replacements in gimple
5136 operands. */
5137
5138 static tree
5139 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5140 {
5141 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5142 copy_body_data *id = (copy_body_data *) wi->info;
5143 hash_map<tree, tree> *st = id->decl_map;
5144 tree *n;
5145 tree expr = *tp;
5146
5147 /* Only a local declaration (variable or label). */
5148 if ((TREE_CODE (expr) == VAR_DECL
5149 && !TREE_STATIC (expr))
5150 || TREE_CODE (expr) == LABEL_DECL)
5151 {
5152 /* Lookup the declaration. */
5153 n = st->get (expr);
5154
5155 /* If it's there, remap it. */
5156 if (n)
5157 *tp = *n;
5158 *walk_subtrees = 0;
5159 }
5160 else if (TREE_CODE (expr) == STATEMENT_LIST
5161 || TREE_CODE (expr) == BIND_EXPR
5162 || TREE_CODE (expr) == SAVE_EXPR)
5163 gcc_unreachable ();
5164 else if (TREE_CODE (expr) == TARGET_EXPR)
5165 {
5166 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5167 It's OK for this to happen if it was part of a subtree that
5168 isn't immediately expanded, such as operand 2 of another
5169 TARGET_EXPR. */
5170 if (!TREE_OPERAND (expr, 1))
5171 {
5172 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5173 TREE_OPERAND (expr, 3) = NULL_TREE;
5174 }
5175 }
5176
5177 /* Keep iterating. */
5178 return NULL_TREE;
5179 }
5180
5181
5182 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5183 Using the decl_map hash_map of the copy_body_data passed in WI,
5184 remaps all local declarations to appropriate replacements in gimple
5185 statements. */
5186
5187 static tree
5188 replace_locals_stmt (gimple_stmt_iterator *gsip,
5189 bool *handled_ops_p ATTRIBUTE_UNUSED,
5190 struct walk_stmt_info *wi)
5191 {
5192 copy_body_data *id = (copy_body_data *) wi->info;
5193 gimple gs = gsi_stmt (*gsip);
5194
5195 if (gbind *stmt = dyn_cast <gbind *> (gs))
5196 {
5197 tree block = gimple_bind_block (stmt);
5198
5199 if (block)
5200 {
5201 remap_block (&block, id);
5202 gimple_bind_set_block (stmt, block);
5203 }
5204
5205 /* This will remap a lot of the same decls again, but this should be
5206 harmless. */
5207 if (gimple_bind_vars (stmt))
5208 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
5209 NULL, id));
5210 }
5211
5212 /* Keep iterating. */
5213 return NULL_TREE;
5214 }
5215
5216
5217 /* Copies everything in SEQ and replaces variables and labels local to
5218 current_function_decl. */
5219
5220 gimple_seq
5221 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5222 {
5223 copy_body_data id;
5224 struct walk_stmt_info wi;
5225 gimple_seq copy;
5226
5227 /* There's nothing to do for NULL_TREE. */
5228 if (seq == NULL)
5229 return seq;
5230
5231 /* Set up ID. */
5232 memset (&id, 0, sizeof (id));
5233 id.src_fn = current_function_decl;
5234 id.dst_fn = current_function_decl;
5235 id.decl_map = new hash_map<tree, tree>;
5236 id.debug_map = NULL;
5237
5238 id.copy_decl = copy_decl_no_change;
5239 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5240 id.transform_new_cfg = false;
5241 id.transform_return_to_modify = false;
5242 id.transform_parameter = false;
5243 id.transform_lang_insert_block = NULL;
5244
5245 /* Walk the tree once to find local labels. */
5246 memset (&wi, 0, sizeof (wi));
5247 hash_set<tree> visited;
5248 wi.info = &id;
5249 wi.pset = &visited;
5250 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5251
5252 copy = gimple_seq_copy (seq);
5253
5254 /* Walk the copy, remapping decls. */
5255 memset (&wi, 0, sizeof (wi));
5256 wi.info = &id;
5257 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5258
5259 /* Clean up. */
5260 delete id.decl_map;
5261 if (id.debug_map)
5262 delete id.debug_map;
5263 if (id.dependence_map)
5264 {
5265 delete id.dependence_map;
5266 id.dependence_map = NULL;
5267 }
5268
5269 return copy;
5270 }
5271
5272
5273 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5274
5275 static tree
5276 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5277 {
5278 if (*tp == data)
5279 return (tree) data;
5280 else
5281 return NULL;
5282 }
5283
5284 DEBUG_FUNCTION bool
5285 debug_find_tree (tree top, tree search)
5286 {
5287 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5288 }
5289
5290
5291 /* Declare the variables created by the inliner. Add all the variables in
5292 VARS to BLOCK. */
5293
5294 static void
5295 declare_inline_vars (tree block, tree vars)
5296 {
5297 tree t;
5298 for (t = vars; t; t = DECL_CHAIN (t))
5299 {
5300 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5301 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5302 add_local_decl (cfun, t);
5303 }
5304
5305 if (block)
5306 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5307 }
5308
5309 /* Finish up the copying of DECL into COPY: fix up the debug info, RTL,
5310 TREE_USED flag and context. The DECL originally lived in ID->src_fn;
5311 the copy will live in ID->dst_fn. */
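/* For instance (illustration only): when inlining callee G into caller F,
   an automatic local of G has its DECL_CONTEXT set to F below, while a
   function-scoped static of G keeps G as its context and is shared by
   all inlined copies.  */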
5312
5313 static tree
5314 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5315 {
5316 /* Don't generate debug information for the copy if we wouldn't have
5317 generated it for the original either. */
5318 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5319 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5320
5321 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5322 declaration inspired this copy. */
5323 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5324
5325 /* The new variable/label has no RTL, yet. */
5326 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5327 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5328 SET_DECL_RTL (copy, 0);
5329
5330 /* These args would always appear unused, if not for this. */
5331 TREE_USED (copy) = 1;
5332
5333 /* Set the context for the new declaration. */
5334 if (!DECL_CONTEXT (decl))
5335 /* Globals stay global. */
5336 ;
5337 else if (DECL_CONTEXT (decl) != id->src_fn)
5338 /* Things that weren't in the scope of the function we're inlining
5339 from aren't in the scope we're inlining to, either. */
5340 ;
5341 else if (TREE_STATIC (decl))
5342 /* Function-scoped static variables should stay in the original
5343 function. */
5344 ;
5345 else
5346 /* Ordinary automatic local variables are now in the scope of the
5347 new function. */
5348 DECL_CONTEXT (copy) = id->dst_fn;
5349
5350 return copy;
5351 }
5352
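/* Create a VAR_DECL copy of the PARM_DECL or RESULT_DECL DECL, carrying
   over its type together with the addressability, qualifier and
   points-to bits that later passes rely on.  */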
5353 static tree
5354 copy_decl_to_var (tree decl, copy_body_data *id)
5355 {
5356 tree copy, type;
5357
5358 gcc_assert (TREE_CODE (decl) == PARM_DECL
5359 || TREE_CODE (decl) == RESULT_DECL);
5360
5361 type = TREE_TYPE (decl);
5362
5363 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5364 VAR_DECL, DECL_NAME (decl), type);
5365 if (DECL_PT_UID_SET_P (decl))
5366 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5367 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5368 TREE_READONLY (copy) = TREE_READONLY (decl);
5369 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5370 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5371
5372 return copy_decl_for_dup_finish (id, decl, copy);
5373 }
5374
5375 /* Like copy_decl_to_var, but create a return slot object instead of a
5376 pointer variable for return by invisible reference. */
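/* Illustration (hypothetical source): for

       struct big f (void);

   returned by invisible reference, the RESULT_DECL has type
   `struct big *'; the VAR_DECL built here gets the pointed-to type
   `struct big' so it can serve as the actual return slot.  */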
5377
5378 static tree
5379 copy_result_decl_to_var (tree decl, copy_body_data *id)
5380 {
5381 tree copy, type;
5382
5383 gcc_assert (TREE_CODE (decl) == PARM_DECL
5384 || TREE_CODE (decl) == RESULT_DECL);
5385
5386 type = TREE_TYPE (decl);
5387 if (DECL_BY_REFERENCE (decl))
5388 type = TREE_TYPE (type);
5389
5390 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5391 VAR_DECL, DECL_NAME (decl), type);
5392 if (DECL_PT_UID_SET_P (decl))
5393 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5394 TREE_READONLY (copy) = TREE_READONLY (decl);
5395 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5396 if (!DECL_BY_REFERENCE (decl))
5397 {
5398 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5399 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5400 }
5401
5402 return copy_decl_for_dup_finish (id, decl, copy);
5403 }
5404
5405 tree
5406 copy_decl_no_change (tree decl, copy_body_data *id)
5407 {
5408 tree copy;
5409
5410 copy = copy_node (decl);
5411
5412 /* The COPY is not abstract; it will be generated in DST_FN. */
5413 DECL_ABSTRACT_P (copy) = false;
5414 lang_hooks.dup_lang_specific_decl (copy);
5415
5416 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5417 been taken; it's for internal bookkeeping in expand_goto_internal. */
5418 if (TREE_CODE (copy) == LABEL_DECL)
5419 {
5420 TREE_ADDRESSABLE (copy) = 0;
5421 LABEL_DECL_UID (copy) = -1;
5422 }
5423
5424 return copy_decl_for_dup_finish (id, decl, copy);
5425 }
5426
5427 static tree
5428 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5429 {
5430 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5431 return copy_decl_to_var (decl, id);
5432 else
5433 return copy_decl_no_change (decl, id);
5434 }
5435
5436 /* Return a copy of the function's argument tree, omitting the arguments set in ARGS_TO_SKIP. */
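/* For example (illustration only): versioning int f (int a, int b, int c)
   with bit 1 set in ARGS_TO_SKIP yields the parameter list (a, c); B is
   instead mapped to an equivalent VAR_DECL chained onto *VARS so that any
   remaining uses of it can still be remapped.  */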
5437 static tree
5438 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5439 bitmap args_to_skip, tree *vars)
5440 {
5441 tree arg, *parg;
5442 tree new_parm = NULL;
5443 int i = 0;
5444
5445 parg = &new_parm;
5446
5447 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5448 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5449 {
5450 tree new_tree = remap_decl (arg, id);
5451 if (TREE_CODE (new_tree) != PARM_DECL)
5452 new_tree = id->copy_decl (arg, id);
5453 lang_hooks.dup_lang_specific_decl (new_tree);
5454 *parg = new_tree;
5455 parg = &DECL_CHAIN (new_tree);
5456 }
5457 else if (!id->decl_map->get (arg))
5458 {
5459 /* Make an equivalent VAR_DECL. If the argument is later used
5460 as a temporary variable in the function, those uses will be
5461 replaced by the local variable. */
5462 tree var = copy_decl_to_var (arg, id);
5463 insert_decl_map (id, arg, var);
5464 /* Declare this new variable. */
5465 DECL_CHAIN (var) = *vars;
5466 *vars = var;
5467 }
5468 return new_parm;
5469 }
5470
5471 /* Return a copy of the function's static chain. */
5472 static tree
5473 copy_static_chain (tree static_chain, copy_body_data * id)
5474 {
5475 tree *chain_copy, *pvar;
5476
5477 chain_copy = &static_chain;
5478 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5479 {
5480 tree new_tree = remap_decl (*pvar, id);
5481 lang_hooks.dup_lang_specific_decl (new_tree);
5482 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5483 *pvar = new_tree;
5484 }
5485 return static_chain;
5486 }
5487
5488 /* Return true if the function is allowed to be versioned.
5489 This is a guard for the versioning functionality. */
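/* E.g. a function declared with __attribute__ ((noclone)) is never
   versioned, nor is one for which copy_forbidden finds a blocker, such
   as a body that receives a non-local goto.  */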
5490
5491 bool
5492 tree_versionable_function_p (tree fndecl)
5493 {
5494 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5495 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
5496 }
5497
5498 /* Delete all unreachable basic blocks and update callgraph.
5499 Doing so is somewhat nontrivial because we need to update all clones and
5500 remove inline functions that become unreachable. */
5501
5502 static bool
5503 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5504 {
5505 bool changed = false;
5506 basic_block b, next_bb;
5507
5508 find_unreachable_blocks ();
5509
5510 /* Delete all unreachable basic blocks. */
5511
5512 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5513 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5514 {
5515 next_bb = b->next_bb;
5516
5517 if (!(b->flags & BB_REACHABLE))
5518 {
5519 gimple_stmt_iterator bsi;
5520
5521 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5522 {
5523 struct cgraph_edge *e;
5524 struct cgraph_node *node;
5525
5526 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5527
5528 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5529 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5530 {
5531 if (!e->inline_failed)
5532 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5533 else
5534 e->remove ();
5535 }
5536 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5537 && id->dst_node->clones)
5538 for (node = id->dst_node->clones; node != id->dst_node;)
5539 {
5540 node->remove_stmt_references (gsi_stmt (bsi));
5541 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5542 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5543 {
5544 if (!e->inline_failed)
5545 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5546 else
5547 e->remove ();
5548 }
5549
5550 if (node->clones)
5551 node = node->clones;
5552 else if (node->next_sibling_clone)
5553 node = node->next_sibling_clone;
5554 else
5555 {
5556 while (node != id->dst_node && !node->next_sibling_clone)
5557 node = node->clone_of;
5558 if (node != id->dst_node)
5559 node = node->next_sibling_clone;
5560 }
5561 }
5562 }
5563 delete_basic_block (b);
5564 changed = true;
5565 }
5566 }
5567
5568 return changed;
5569 }
5570
5571 /* Update clone info after duplication. */
5572
5573 static void
5574 update_clone_info (copy_body_data * id)
5575 {
5576 struct cgraph_node *node;
5577 if (!id->dst_node->clones)
5578 return;
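/* Walk the clone tree in preorder: descend into node->clones first, then
   advance to next_sibling_clone, climbing back up via clone_of once a
   subtree is exhausted.  */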
5579 for (node = id->dst_node->clones; node != id->dst_node;)
5580 {
5581 /* First update replace maps to match the new body. */
5582 if (node->clone.tree_map)
5583 {
5584 unsigned int i;
5585 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5586 {
5587 struct ipa_replace_map *replace_info;
5588 replace_info = (*node->clone.tree_map)[i];
5589 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5590 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5591 }
5592 }
5593 if (node->clones)
5594 node = node->clones;
5595 else if (node->next_sibling_clone)
5596 node = node->next_sibling_clone;
5597 else
5598 {
5599 while (node != id->dst_node && !node->next_sibling_clone)
5600 node = node->clone_of;
5601 if (node != id->dst_node)
5602 node = node->next_sibling_clone;
5603 }
5604 }
5605 }
5606
5607 /* Create a copy of a function's tree.
5608 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5609 of the original function and the new copied function
5610 respectively. In case we want to replace a DECL
5611 tree with another tree while duplicating the function's
5612 body, TREE_MAP represents the mapping between these
5613 trees. If UPDATE_CLONES is set, the call_stmt fields
5614 of edges of clones of the function will be updated.
5615
5616 If non-NULL, ARGS_TO_SKIP determines which function parameters to
5617 remove from the new version.
5618 If SKIP_RETURN is true, the new version will return void.
5619 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5620 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5621 */
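/* A hypothetical caller sketch (shape only, not code from this file):

       tree new_decl = ...;                        fresh FUNCTION_DECL
       vec<ipa_replace_map *, va_gc> *map = ...;   e.g. replace parm 0 by 42
       tree_function_versioning (old_decl, new_decl, map,
                                 false, NULL, false, NULL, NULL);

   i.e. no clone updating, no skipped arguments, the whole body, and the
   default entry block, as an IPA pass materializing a specialized clone
   might request.  */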
5622 void
5623 tree_function_versioning (tree old_decl, tree new_decl,
5624 vec<ipa_replace_map *, va_gc> *tree_map,
5625 bool update_clones, bitmap args_to_skip,
5626 bool skip_return, bitmap blocks_to_copy,
5627 basic_block new_entry)
5628 {
5629 struct cgraph_node *old_version_node;
5630 struct cgraph_node *new_version_node;
5631 copy_body_data id;
5632 tree p;
5633 unsigned i;
5634 struct ipa_replace_map *replace_info;
5635 basic_block old_entry_block, bb;
5636 auto_vec<gimple, 10> init_stmts;
5637 tree vars = NULL_TREE;
5638
5639 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5640 && TREE_CODE (new_decl) == FUNCTION_DECL);
5641 DECL_POSSIBLY_INLINED (old_decl) = 1;
5642
5643 old_version_node = cgraph_node::get (old_decl);
5644 gcc_checking_assert (old_version_node);
5645 new_version_node = cgraph_node::get (new_decl);
5646 gcc_checking_assert (new_version_node);
5647
5648 /* Copy over debug args. */
5649 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5650 {
5651 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5652 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5653 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5654 old_debug_args = decl_debug_args_lookup (old_decl);
5655 if (old_debug_args)
5656 {
5657 new_debug_args = decl_debug_args_insert (new_decl);
5658 *new_debug_args = vec_safe_copy (*old_debug_args);
5659 }
5660 }
5661
5662 /* Output the inlining info for this abstract function, since it has been
5663 inlined. If we don't do this now, we can lose the information about the
5664 variables in the function when the blocks get blown away as soon as we
5665 remove the cgraph node. */
5666 (*debug_hooks->outlining_inline_function) (old_decl);
5667
5668 DECL_ARTIFICIAL (new_decl) = 1;
5669 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5670 if (DECL_ORIGIN (old_decl) == old_decl)
5671 old_version_node->used_as_abstract_origin = true;
5672 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5673
5674 /* Prepare the data structures for the tree copy. */
5675 memset (&id, 0, sizeof (id));
5676
5677 /* Collect statements that may need folding once the body is copied. */
5678 id.statements_to_fold = new hash_set<gimple>;
5679
5680 id.decl_map = new hash_map<tree, tree>;
5681 id.debug_map = NULL;
5682 id.src_fn = old_decl;
5683 id.dst_fn = new_decl;
5684 id.src_node = old_version_node;
5685 id.dst_node = new_version_node;
5686 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5687 id.blocks_to_copy = blocks_to_copy;
5688
5689 id.copy_decl = copy_decl_no_change;
5690 id.transform_call_graph_edges
5691 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5692 id.transform_new_cfg = true;
5693 id.transform_return_to_modify = false;
5694 id.transform_parameter = false;
5695 id.transform_lang_insert_block = NULL;
5696
5697 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5698 (DECL_STRUCT_FUNCTION (old_decl));
5699 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5700 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5701 initialize_cfun (new_decl, old_decl,
5702 old_entry_block->count);
5703 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5704 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5705 = id.src_cfun->gimple_df->ipa_pta;
5706
5707 /* Copy the function's static chain. */
5708 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5709 if (p)
5710 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5711 = copy_static_chain (p, &id);
5713
5714 /* If there's a tree_map, prepare for substitution. */
5715 if (tree_map)
5716 for (i = 0; i < tree_map->length (); i++)
5717 {
5718 gimple init;
5719 replace_info = (*tree_map)[i];
5720 if (replace_info->replace_p)
5721 {
5722 if (!replace_info->old_tree)
5723 {
5724 int i = replace_info->parm_num;
5725 tree parm;
5726 tree req_type;
5727
5728 for (parm = DECL_ARGUMENTS (old_decl); i; parm = DECL_CHAIN (parm))
5729 i--;
5730 replace_info->old_tree = parm;
5731 req_type = TREE_TYPE (parm);
5732 if (!useless_type_conversion_p (req_type, TREE_TYPE (replace_info->new_tree)))
5733 {
5734 if (fold_convertible_p (req_type, replace_info->new_tree))
5735 replace_info->new_tree = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
5736 else if (TYPE_SIZE (req_type) == TYPE_SIZE (TREE_TYPE (replace_info->new_tree)))
5737 replace_info->new_tree = fold_build1 (VIEW_CONVERT_EXPR, req_type, replace_info->new_tree);
5738 else
5739 {
5740 if (dump_file)
5741 {
5742 fprintf (dump_file, " const ");
5743 print_generic_expr (dump_file, replace_info->new_tree, 0);
5744 fprintf (dump_file, " can't be converted to param ");
5745 print_generic_expr (dump_file, parm, 0);
5746 fprintf (dump_file, "\n");
5747 }
5748 replace_info->old_tree = NULL;
5749 }
5750 }
5751 }
5752 else
5753 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5754 if (replace_info->old_tree)
5755 {
5756 init = setup_one_parameter (&id, replace_info->old_tree,
5757 replace_info->new_tree, id.src_fn,
5758 NULL,
5759 &vars);
5760 if (init)
5761 init_stmts.safe_push (init);
5762 }
5763 }
5764 }
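/* For example (illustration only): substituting the long constant 42 for
   an int parameter above goes through fold_convertible_p and becomes
   (int) 42; when only the type sizes match, say float for int, the bits
   are reinterpreted with a VIEW_CONVERT_EXPR instead.  */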
5765 /* Copy the function's arguments. */
5766 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5767 DECL_ARGUMENTS (new_decl) =
5768 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5769 args_to_skip, &vars);
5770
5771 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5772 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5773
5774 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5775
5776 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5777 /* Add local vars. */
5778 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5779
5780 if (DECL_RESULT (old_decl) == NULL_TREE)
5781 ;
5782 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5783 {
5784 DECL_RESULT (new_decl)
5785 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5786 RESULT_DECL, NULL_TREE, void_type_node);
5787 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5788 cfun->returns_struct = 0;
5789 cfun->returns_pcc_struct = 0;
5790 }
5791 else
5792 {
5793 tree old_name;
5794 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5795 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5796 if (gimple_in_ssa_p (id.src_cfun)
5797 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5798 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5799 {
5800 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5801 insert_decl_map (&id, old_name, new_name);
5802 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5803 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5804 }
5805 }
5806
5807 /* Set up the destination function's loop tree. */
5808 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5809 {
5810 cfun->curr_properties &= ~PROP_loops;
5811 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5812 cfun->curr_properties |= PROP_loops;
5813 }
5814
5815 /* Copy the function's body. */
5816 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5817 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5818 new_entry);
5819
5820 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5821 number_blocks (new_decl);
5822
5823 /* We want to create the BB unconditionally, so that the addition of
5824 debug stmts doesn't affect BB count, which may in the end cause
5825 codegen differences. */
5826 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5827 while (init_stmts.length ())
5828 insert_init_stmt (&id, bb, init_stmts.pop ());
5829 update_clone_info (&id);
5830
5831 /* Remap the nonlocal_goto_save_area, if any. */
5832 if (cfun->nonlocal_goto_save_area)
5833 {
5834 struct walk_stmt_info wi;
5835
5836 memset (&wi, 0, sizeof (wi));
5837 wi.info = &id;
5838 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5839 }
5840
5841 /* Clean up. */
5842 delete id.decl_map;
5843 if (id.debug_map)
5844 delete id.debug_map;
5845 free_dominance_info (CDI_DOMINATORS);
5846 free_dominance_info (CDI_POST_DOMINATORS);
5847
5848 fold_marked_statements (0, id.statements_to_fold);
5849 delete id.statements_to_fold;
5850 fold_cond_expr_cond ();
5851 delete_unreachable_blocks_update_callgraph (&id);
5852 if (id.dst_node->definition)
5853 cgraph_edge::rebuild_references ();
5854 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5855 {
5856 calculate_dominance_info (CDI_DOMINATORS);
5857 fix_loop_structure (NULL);
5858 }
5859 update_ssa (TODO_update_ssa);
5860
5861 /* After partial cloning we need to rescale frequencies, so they are
5862 within proper range in the cloned function. */
5863 if (new_entry)
5864 {
5865 struct cgraph_edge *e;
5866 rebuild_frequencies ();
5867
5868 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5869 for (e = new_version_node->callees; e; e = e->next_callee)
5870 {
5871 basic_block bb = gimple_bb (e->call_stmt);
5872 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5873 bb);
5874 e->count = bb->count;
5875 }
5876 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5877 {
5878 basic_block bb = gimple_bb (e->call_stmt);
5879 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5880 bb);
5881 e->count = bb->count;
5882 }
5883 }
5884
5885 free_dominance_info (CDI_DOMINATORS);
5886 free_dominance_info (CDI_POST_DOMINATORS);
5887
5888 gcc_assert (!id.debug_stmts.exists ());
5889 pop_cfun ();
5890 return;
5891 }
5892
5893 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5894 the callee and return the inlined body on success. */
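/* Usage sketch (hypothetical): given a call to a "const" function whose
   body is available,

       tree call = build_call_expr (fn, 1, arg);
       tree val = maybe_inline_call_in_expr (call);

   VAL is the GENERIC expression computing the call's value, or NULL_TREE
   when the call could not be integrated.  */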
5895
5896 tree
5897 maybe_inline_call_in_expr (tree exp)
5898 {
5899 tree fn = get_callee_fndecl (exp);
5900
5901 /* We can only try to inline "const" functions. */
5902 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5903 {
5904 call_expr_arg_iterator iter;
5905 copy_body_data id;
5906 tree param, arg, t;
5907 hash_map<tree, tree> decl_map;
5908
5909 /* Remap the parameters. */
5910 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5911 param;
5912 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
5913 decl_map.put (param, arg);
5914
5915 memset (&id, 0, sizeof (id));
5916 id.src_fn = fn;
5917 id.dst_fn = current_function_decl;
5918 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5919 id.decl_map = &decl_map;
5920
5921 id.copy_decl = copy_decl_no_change;
5922 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5923 id.transform_new_cfg = false;
5924 id.transform_return_to_modify = true;
5925 id.transform_parameter = true;
5926 id.transform_lang_insert_block = NULL;
5927
5928 /* Make sure not to unshare trees behind the front-end's back
5929 since front-end specific mechanisms may rely on sharing. */
5930 id.regimplify = false;
5931 id.do_not_unshare = true;
5932
5933 /* We're not inside any EH region. */
5934 id.eh_lp_nr = 0;
5935
5936 t = copy_tree_body (&id);
5937
5938 /* We can only return something suitable for use in a GENERIC
5939 expression tree. */
5940 if (TREE_CODE (t) == MODIFY_EXPR)
5941 return TREE_OPERAND (t, 1);
5942 }
5943
5944 return NULL_TREE;
5945 }
5946
5947 /* Duplicate a type, fields and all. */
5948
5949 tree
5950 build_duplicate_type (tree type)
5951 {
5952 struct copy_body_data id;
5953
5954 memset (&id, 0, sizeof (id));
5955 id.src_fn = current_function_decl;
5956 id.dst_fn = current_function_decl;
5957 id.src_cfun = cfun;
5958 id.decl_map = new hash_map<tree, tree>;
5959 id.debug_map = NULL;
5960 id.copy_decl = copy_decl_no_change;
5961
5962 type = remap_type_1 (type, &id);
5963
5964 delete id.decl_map;
5965 if (id.debug_map)
5966 delete id.debug_map;
5967
5968 TYPE_CANONICAL (type) = type;
5969
5970 return type;
5971 }
5972
5973 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
5974 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
5975 evaluation. */
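/* Sketch of the intended use (hypothetical shape of the C++ caller):

       tree parms, result;
       tree body = copy_fn (fndecl, parms, result);

   PARMS chains the remapped PARM_DECLs that the evaluator binds to the
   argument values, and RESULT names the remapped return object while
   BODY is evaluated.  */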
5976
5977 tree
5978 copy_fn (tree fn, tree& parms, tree& result)
5979 {
5980 copy_body_data id;
5981 tree param;
5982 hash_map<tree, tree> decl_map;
5983
5984 tree *p = &parms;
5985 *p = NULL_TREE;
5986
5987 memset (&id, 0, sizeof (id));
5988 id.src_fn = fn;
5989 id.dst_fn = current_function_decl;
5990 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5991 id.decl_map = &decl_map;
5992
5993 id.copy_decl = copy_decl_no_change;
5994 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5995 id.transform_new_cfg = false;
5996 id.transform_return_to_modify = false;
5997 id.transform_parameter = true;
5998 id.transform_lang_insert_block = NULL;
5999
6000 /* Make sure not to unshare trees behind the front-end's back
6001 since front-end specific mechanisms may rely on sharing. */
6002 id.regimplify = false;
6003 id.do_not_unshare = true;
6004
6005 /* We're not inside any EH region. */
6006 id.eh_lp_nr = 0;
6007
6008 /* Remap the parameters and result and return them to the caller. */
6009 for (param = DECL_ARGUMENTS (fn);
6010 param;
6011 param = DECL_CHAIN (param))
6012 {
6013 *p = remap_decl (param, &id);
6014 p = &DECL_CHAIN (*p);
6015 }
6016
6017 if (DECL_RESULT (fn))
6018 result = remap_decl (DECL_RESULT (fn), &id);
6019 else
6020 result = NULL_TREE;
6021
6022 return copy_tree_body (&id);
6023 }