1 /* Tree inlining.
2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63
64 /* I'm not really happy about this, but we need to handle gimple and
65 non-gimple trees. */
66
67 /* Inlining, Cloning, Versioning, Parallelization
68
69 Inlining: a function body is duplicated, but the PARM_DECLs are
70 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
71 MODIFY_EXPRs that store to a dedicated returned-value variable.
72 The duplicated eh_region info of the copy will later be appended
73 to the info for the caller; the eh_region info in copied throwing
74 statements and RESX statements is adjusted accordingly.
75
76 Cloning: (only in C++) We have one body for a con/de/structor, and
77 multiple function decls, each with a unique parameter list.
78 Duplicate the body, using the given splay tree; some parameters
79 will become constants (like 0 or 1).
80
81 Versioning: a function body is duplicated and the result is a new
82 function, rather than being copied into blocks of an existing function
83 as with inlining. Some parameters will become constants.
84
85 Parallelization: a region of a function is duplicated resulting in
86 a new function. Variables may be replaced with complex expressions
87 to enable shared variable semantics.
88
89 All of these will simultaneously look up any callgraph edges. If
90 we're going to inline the duplicated function body, and the given
91 function has some cloned callgraph nodes (one for each place this
92 function will be inlined), those callgraph edges will be duplicated.
93 If we're cloning the body, those callgraph edges will be
94 updated to point into the new body. (Note that the original
95 callgraph node and edge list will not be altered.)
96
97 See the CALL_EXPR handling case in copy_tree_body_r (). */
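/* A hedged illustration of the inlining transform described above (the
   temporaries shown are hypothetical, not names the compiler is guaranteed
   to produce): given a callee

     int sq (int x) { return x * x; }

   inlining the call "y = sq (a);" remaps the PARM_DECL x to a new local
   initialized from the argument, and the non-void return becomes an
   assignment to a returned-value variable:

     x.1 = a;
     retval.2 = x.1 * x.1;
     y = retval.2;  */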
98
99 /* To Do:
100
101 o In order to make inlining-on-trees work, we pessimized
102 function-local static constants. In particular, they are now
103 always output, even when not addressed. Fix this by treating
104 function-local static constants just like global static
105 constants; the back-end already knows not to output them if they
106 are not needed.
107
108 o Provide heuristics to clamp inlining of recursive template
109 calls? */
110
111
112 /* Weights that estimate_num_insns uses to estimate the size of the
113 produced code. */
114
115 eni_weights eni_size_weights;
116
117 /* Weights that estimate_num_insns uses to estimate the time necessary
118 to execute the produced code. */
119
120 eni_weights eni_time_weights;
121
122 /* Prototypes. */
123
124 static tree declare_return_variable (copy_body_data *, tree, tree,
125 basic_block);
126 static void remap_block (tree *, copy_body_data *);
127 static void copy_bind_expr (tree *, int *, copy_body_data *);
128 static void declare_inline_vars (tree, tree);
129 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
130 static void prepend_lexical_block (tree current_block, tree new_block);
131 static tree copy_decl_to_var (tree, copy_body_data *);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
137
138 /* Insert a tree->tree mapping for ID. Although the name suggests
139 that the trees should be variables, this map is used for more than that. */
140
141 void
142 insert_decl_map (copy_body_data *id, tree key, tree value)
143 {
144 id->decl_map->put (key, value);
145
146 /* Always insert an identity map as well. If we see this same new
147 node again, we won't want to duplicate it a second time. */
148 if (key != value)
149 id->decl_map->put (value, value);
150 }
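/* Illustrative use of the identity entry above (decls A and B are
   hypothetical): after insert_decl_map (id, A, B) the map holds both
   A -> B and B -> B, so if the copied tree is walked again and B itself is
   encountered, the lookup returns B unchanged instead of making a second
   copy of it.  */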
151
152 /* Insert a tree->tree mapping for ID. This is only used for
153 variables. */
154
155 static void
156 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
157 {
158 if (!gimple_in_ssa_p (id->src_cfun))
159 return;
160
161 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 return;
163
164 if (!target_for_debug_bind (key))
165 return;
166
167 gcc_assert (TREE_CODE (key) == PARM_DECL);
168 gcc_assert (VAR_P (value));
169
170 if (!id->debug_map)
171 id->debug_map = new hash_map<tree, tree>;
172
173 id->debug_map->put (key, value);
174 }
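/* Sketch of how this map is meant to be used (hedged; see the callers for
   the exact mechanics): when a read-only PARM_DECL of the inlined function
   is replaced directly by its argument value, the PARM_DECL -> VAR_DECL
   pair recorded here lets later remapping of debug bind statements still
   find a variable that describes the parameter to the debugger.  */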
175
176 /* If nonzero, we're remapping the contents of inlined debug
177 statements. If negative, an error has occurred, such as a
178 reference to a variable that isn't available in the inlined
179 context. */
180 static int processing_debug_stmt = 0;
181
182 /* Construct new SSA name for old NAME. ID is the inline context. */
183
184 static tree
185 remap_ssa_name (tree name, copy_body_data *id)
186 {
187 tree new_tree, var;
188 tree *n;
189
190 gcc_assert (TREE_CODE (name) == SSA_NAME);
191
192 n = id->decl_map->get (name);
193 if (n)
194 return unshare_expr (*n);
195
196 if (processing_debug_stmt)
197 {
198 if (SSA_NAME_IS_DEFAULT_DEF (name)
199 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
200 && id->entry_bb == NULL
201 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
202 {
203 tree vexpr = make_node (DEBUG_EXPR_DECL);
204 gimple *def_temp;
205 gimple_stmt_iterator gsi;
206 tree val = SSA_NAME_VAR (name);
207
208 n = id->decl_map->get (val);
209 if (n != NULL)
210 val = *n;
211 if (TREE_CODE (val) != PARM_DECL
212 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
213 {
214 processing_debug_stmt = -1;
215 return name;
216 }
217 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
218 DECL_ARTIFICIAL (vexpr) = 1;
219 TREE_TYPE (vexpr) = TREE_TYPE (name);
220 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
221 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
222 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
223 return vexpr;
224 }
225
226 processing_debug_stmt = -1;
227 return name;
228 }
229
230 /* Remap anonymous SSA names or SSA names of anonymous decls. */
231 var = SSA_NAME_VAR (name);
232 if (!var
233 || (!SSA_NAME_IS_DEFAULT_DEF (name)
234 && VAR_P (var)
235 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
236 && DECL_ARTIFICIAL (var)
237 && DECL_IGNORED_P (var)
238 && !DECL_NAME (var)))
239 {
240 struct ptr_info_def *pi;
241 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
242 if (!var && SSA_NAME_IDENTIFIER (name))
243 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
244 insert_decl_map (id, name, new_tree);
245 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
246 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
247 /* At least IPA points-to info can be directly transferred. */
248 if (id->src_cfun->gimple_df
249 && id->src_cfun->gimple_df->ipa_pta
250 && POINTER_TYPE_P (TREE_TYPE (name))
251 && (pi = SSA_NAME_PTR_INFO (name))
252 && !pi->pt.anything)
253 {
254 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
255 new_pi->pt = pi->pt;
256 }
257 return new_tree;
258 }
259
260 /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
261 in copy_bb. */
262 new_tree = remap_decl (var, id);
263
264 /* We might've substituted a constant or another SSA_NAME for
265 the variable.
266
267 Replace the SSA name representing the RESULT_DECL by the variable during
268 inlining: this saves us from the need to introduce a PHI node in case the
269 return value is only partly initialized. */
270 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
271 && (!SSA_NAME_VAR (name)
272 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
273 || !id->transform_return_to_modify))
274 {
275 struct ptr_info_def *pi;
276 new_tree = make_ssa_name (new_tree);
277 insert_decl_map (id, name, new_tree);
278 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
279 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
280 /* At least IPA points-to info can be directly transferred. */
281 if (id->src_cfun->gimple_df
282 && id->src_cfun->gimple_df->ipa_pta
283 && POINTER_TYPE_P (TREE_TYPE (name))
284 && (pi = SSA_NAME_PTR_INFO (name))
285 && !pi->pt.anything)
286 {
287 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
288 new_pi->pt = pi->pt;
289 }
290 if (SSA_NAME_IS_DEFAULT_DEF (name))
291 {
292 /* By inlining a function having an uninitialized variable, we might
293 extend its lifetime (the variable might get reused). This causes an
294 ICE in the case we end up extending the lifetime of an SSA name across an
295 abnormal edge, but it also increases register pressure.
296
297 We simply initialize all uninitialized vars to 0, except
298 for the case where we are inlining into the very first BB. We can avoid
299 this for all BBs that are not inside strongly connected
300 regions of the CFG, but this is expensive to test. */
301 if (id->entry_bb
302 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
303 && (!SSA_NAME_VAR (name)
304 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
305 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
306 0)->dest
307 || EDGE_COUNT (id->entry_bb->preds) != 1))
308 {
309 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
310 gimple *init_stmt;
311 tree zero = build_zero_cst (TREE_TYPE (new_tree));
312
313 init_stmt = gimple_build_assign (new_tree, zero);
314 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
315 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
316 }
317 else
318 {
319 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
320 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
321 }
322 }
323 }
324 else
325 insert_decl_map (id, name, new_tree);
326 return new_tree;
327 }
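/* A hedged example of the debug-only path above (names illustrative): if a
   default definition x_1(D) of PARM_DECL x survives only in debug
   statements of the inlined body, the code emits near the entry block

     # DEBUG D#1 s=> x

   (a debug source bind on a new DEBUG_EXPR_DECL) and lets the debug
   statements refer to D#1, rather than giving up on the value of x.  */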
328
329 /* Remap DECL during the copying of the BLOCK tree for the function. */
330
331 tree
332 remap_decl (tree decl, copy_body_data *id)
333 {
334 tree *n;
335
336 /* We only remap local variables in the current function. */
337
338 /* See if we have remapped this declaration. */
339
340 n = id->decl_map->get (decl);
341
342 if (!n && processing_debug_stmt)
343 {
344 processing_debug_stmt = -1;
345 return decl;
346 }
347
348 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
349 necessary DECLs have already been remapped and we do not want to duplicate
350 a decl coming from outside of the sequence we are copying. */
351 if (!n
352 && id->prevent_decl_creation_for_types
353 && id->remapping_type_depth > 0
354 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
355 return decl;
356
357 /* If we didn't already have an equivalent for this declaration, create one
358 now. */
359 if (!n)
360 {
361 /* Make a copy of the variable or label. */
362 tree t = id->copy_decl (decl, id);
363
364 /* Remember it, so that if we encounter this local entity again
365 we can reuse this copy. Do this early because remap_type may
366 need this decl for TYPE_STUB_DECL. */
367 insert_decl_map (id, decl, t);
368
369 if (!DECL_P (t))
370 return t;
371
372 /* Remap types, if necessary. */
373 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
374 if (TREE_CODE (t) == TYPE_DECL)
375 {
376 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
377
378 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
379 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
380 is not set on the TYPE_DECL, for example in LTO mode. */
381 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
382 {
383 tree x = build_variant_type_copy (TREE_TYPE (t));
384 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
385 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
386 DECL_ORIGINAL_TYPE (t) = x;
387 }
388 }
389
390 /* Remap sizes as necessary. */
391 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
392 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
393
394 /* If fields, do likewise for offset and qualifier. */
395 if (TREE_CODE (t) == FIELD_DECL)
396 {
397 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
398 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
399 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
400 }
401
402 return t;
403 }
404
405 if (id->do_not_unshare)
406 return *n;
407 else
408 return unshare_expr (*n);
409 }
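/* The lookup-or-create flow above is the usual memoized deep copy.  A
   minimal standalone sketch of the same idea, in plain C++ with a
   hypothetical node type (not GCC code):

     #include <unordered_map>

     struct node { node *child; };

     static node *
     copy_once (node *n, std::unordered_map<node *, node *> &map)
     {
       if (!n)
	 return nullptr;
       auto it = map.find (n);
       if (it != map.end ())
	 return it->second;	// Already copied: reuse the copy.
       node *c = new node (*n);
       map[n] = c;		// Record before recursing, as remap_decl
				// records via insert_decl_map, so cycles
				// and re-visits terminate.
       c->child = copy_once (n->child, map);
       return c;
     }  */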
410
411 static tree
412 remap_type_1 (tree type, copy_body_data *id)
413 {
414 tree new_tree, t;
415
416 /* We do need a copy. Build and register it now. If this is a pointer or
417 reference type, remap the designated type and make a new pointer or
418 reference type. */
419 if (TREE_CODE (type) == POINTER_TYPE)
420 {
421 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
422 TYPE_MODE (type),
423 TYPE_REF_CAN_ALIAS_ALL (type));
424 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
425 new_tree = build_type_attribute_qual_variant (new_tree,
426 TYPE_ATTRIBUTES (type),
427 TYPE_QUALS (type));
428 insert_decl_map (id, type, new_tree);
429 return new_tree;
430 }
431 else if (TREE_CODE (type) == REFERENCE_TYPE)
432 {
433 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
434 TYPE_MODE (type),
435 TYPE_REF_CAN_ALIAS_ALL (type));
436 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
437 new_tree = build_type_attribute_qual_variant (new_tree,
438 TYPE_ATTRIBUTES (type),
439 TYPE_QUALS (type));
440 insert_decl_map (id, type, new_tree);
441 return new_tree;
442 }
443 else
444 new_tree = copy_node (type);
445
446 insert_decl_map (id, type, new_tree);
447
448 /* This is a new type, not a copy of an old type. Need to reassociate
449 variants. We can handle everything except the main variant lazily. */
450 t = TYPE_MAIN_VARIANT (type);
451 if (type != t)
452 {
453 t = remap_type (t, id);
454 TYPE_MAIN_VARIANT (new_tree) = t;
455 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
456 TYPE_NEXT_VARIANT (t) = new_tree;
457 }
458 else
459 {
460 TYPE_MAIN_VARIANT (new_tree) = new_tree;
461 TYPE_NEXT_VARIANT (new_tree) = NULL;
462 }
463
464 if (TYPE_STUB_DECL (type))
465 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
466
467 /* Lazily create pointer and reference types. */
468 TYPE_POINTER_TO (new_tree) = NULL;
469 TYPE_REFERENCE_TO (new_tree) = NULL;
470
471 /* Copy all types that may contain references to local variables; be sure to
472 preserve sharing between the type and its main variant when possible. */
473 switch (TREE_CODE (new_tree))
474 {
475 case INTEGER_TYPE:
476 case REAL_TYPE:
477 case FIXED_POINT_TYPE:
478 case ENUMERAL_TYPE:
479 case BOOLEAN_TYPE:
480 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
481 {
482 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
483 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
484
485 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
486 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
487 }
488 else
489 {
490 t = TYPE_MIN_VALUE (new_tree);
491 if (t && TREE_CODE (t) != INTEGER_CST)
492 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
493
494 t = TYPE_MAX_VALUE (new_tree);
495 if (t && TREE_CODE (t) != INTEGER_CST)
496 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
497 }
498 return new_tree;
499
500 case FUNCTION_TYPE:
501 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
502 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
503 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
504 else
505 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
508 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
509 else
510 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
511 return new_tree;
512
513 case ARRAY_TYPE:
514 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
515 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
516 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
517 else
518 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
519
520 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
521 {
522 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
523 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
524 }
525 else
526 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
527 break;
528
529 case RECORD_TYPE:
530 case UNION_TYPE:
531 case QUAL_UNION_TYPE:
532 if (TYPE_MAIN_VARIANT (type) != type
533 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
534 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
535 else
536 {
537 tree f, nf = NULL;
538
539 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
540 {
541 t = remap_decl (f, id);
542 DECL_CONTEXT (t) = new_tree;
543 DECL_CHAIN (t) = nf;
544 nf = t;
545 }
546 TYPE_FIELDS (new_tree) = nreverse (nf);
547 }
548 break;
549
550 case OFFSET_TYPE:
551 default:
552 /* Shouldn't have been thought variable sized. */
553 gcc_unreachable ();
554 }
555
556 /* All variants of a type share the same size, so use the already remapped data. */
557 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
558 {
559 tree s = TYPE_SIZE (type);
560 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
561 tree su = TYPE_SIZE_UNIT (type);
562 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
563 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
564 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
565 || s == mvs);
566 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
567 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
568 || su == mvsu);
569 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
570 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
571 }
572 else
573 {
574 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
575 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
576 }
577
578 return new_tree;
579 }
580
581 tree
582 remap_type (tree type, copy_body_data *id)
583 {
584 tree *node;
585 tree tmp;
586
587 if (type == NULL)
588 return type;
589
590 /* See if we have remapped this type. */
591 node = id->decl_map->get (type);
592 if (node)
593 return *node;
594
595 /* The type only needs remapping if it's variably modified. */
596 if (! variably_modified_type_p (type, id->src_fn))
597 {
598 insert_decl_map (id, type, type);
599 return type;
600 }
601
602 id->remapping_type_depth++;
603 tmp = remap_type_1 (type, id);
604 id->remapping_type_depth--;
605
606 return tmp;
607 }
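/* Example of a type that actually needs remapping (illustrative): for a
   C99 variable-length array

     void f (int n) { int a[n]; ... }

   the ARRAY_TYPE of 'a' has a size expression that refers to (a saved copy
   of) the parameter 'n', so it is variably modified with respect to 'f';
   inlining must copy the type and remap its size to the caller's version
   of 'n'.  Ordinary fixed-size types take the early exit above and are
   mapped to themselves.  */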
608
609 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
610
611 static bool
612 can_be_nonlocal (tree decl, copy_body_data *id)
613 {
614 /* We cannot duplicate function decls. */
615 if (TREE_CODE (decl) == FUNCTION_DECL)
616 return true;
617
618 /* Local static vars must be non-local or we get multiple declaration
619 problems. */
620 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
621 return true;
622
623 return false;
624 }
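/* For instance (hedged example), in

     static int counter (void) { static int n; return ++n; }

   the function-local static 'n' must not be duplicated when 'counter' is
   inlined into several callers; instead of remapping it, the callers below
   keep it shared and record it in BLOCK_NONLOCALIZED_VARS for debug
   info.  */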
625
626 static tree
627 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
628 copy_body_data *id)
629 {
630 tree old_var;
631 tree new_decls = NULL_TREE;
632
633 /* Remap its variables. */
634 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
635 {
636 tree new_var;
637
638 if (can_be_nonlocal (old_var, id))
639 {
640 /* We need to add this variable to the local decls as otherwise
641 nothing else will do so. */
642 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
643 add_local_decl (cfun, old_var);
644 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
645 && !DECL_IGNORED_P (old_var)
646 && nonlocalized_list)
647 vec_safe_push (*nonlocalized_list, old_var);
648 continue;
649 }
650
651 /* Remap the variable. */
652 new_var = remap_decl (old_var, id);
653
654 /* If we didn't remap this variable, we can't mess with its
655 TREE_CHAIN. If we remapped this variable to the return slot, it's
656 already declared somewhere else, so don't declare it here. */
657
658 if (new_var == id->retvar)
659 ;
660 else if (!new_var)
661 {
662 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
663 && !DECL_IGNORED_P (old_var)
664 && nonlocalized_list)
665 vec_safe_push (*nonlocalized_list, old_var);
666 }
667 else
668 {
669 gcc_assert (DECL_P (new_var));
670 DECL_CHAIN (new_var) = new_decls;
671 new_decls = new_var;
672
673 /* Also copy value-expressions. */
674 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
675 {
676 tree tem = DECL_VALUE_EXPR (new_var);
677 bool old_regimplify = id->regimplify;
678 id->remapping_type_depth++;
679 walk_tree (&tem, copy_tree_body_r, id, NULL);
680 id->remapping_type_depth--;
681 id->regimplify = old_regimplify;
682 SET_DECL_VALUE_EXPR (new_var, tem);
683 }
684 }
685 }
686
687 return nreverse (new_decls);
688 }
689
690 /* Copy the BLOCK to contain remapped versions of the variables
691 therein. And hook the new block into the block-tree. */
692
693 static void
694 remap_block (tree *block, copy_body_data *id)
695 {
696 tree old_block;
697 tree new_block;
698
699 /* Make the new block. */
700 old_block = *block;
701 new_block = make_node (BLOCK);
702 TREE_USED (new_block) = TREE_USED (old_block);
703 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
704 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
705 BLOCK_NONLOCALIZED_VARS (new_block)
706 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
707 *block = new_block;
708
709 /* Remap its variables. */
710 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
711 &BLOCK_NONLOCALIZED_VARS (new_block),
712 id);
713
714 if (id->transform_lang_insert_block)
715 id->transform_lang_insert_block (new_block);
716
717 /* Remember the remapped block. */
718 insert_decl_map (id, old_block, new_block);
719 }
720
721 /* Copy the whole block tree and root it in id->block. */
722
723 static tree
724 remap_blocks (tree block, copy_body_data *id)
725 {
726 tree t;
727 tree new_tree = block;
728
729 if (!block)
730 return NULL;
731
732 remap_block (&new_tree, id);
733 gcc_assert (new_tree != block);
734 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
735 prepend_lexical_block (new_tree, remap_blocks (t, id));
736 /* Blocks are in arbitrary order, but make things slightly prettier and do
737 not swap order when producing a copy. */
738 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
739 return new_tree;
740 }
741
742 /* Remap the block tree rooted at BLOCK to nothing. */
743
744 static void
745 remap_blocks_to_null (tree block, copy_body_data *id)
746 {
747 tree t;
748 insert_decl_map (id, block, NULL_TREE);
749 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
750 remap_blocks_to_null (t, id);
751 }
752
753 /* Remap the location info pointed to by LOCUS. */
754
755 static location_t
756 remap_location (location_t locus, copy_body_data *id)
757 {
758 if (LOCATION_BLOCK (locus))
759 {
760 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
761 gcc_assert (n);
762 if (*n)
763 return set_block (locus, *n);
764 }
765
766 locus = LOCATION_LOCUS (locus);
767
768 if (locus != UNKNOWN_LOCATION && id->block)
769 return set_block (locus, id->block);
770
771 return locus;
772 }
773
774 static void
775 copy_statement_list (tree *tp)
776 {
777 tree_stmt_iterator oi, ni;
778 tree new_tree;
779
780 new_tree = alloc_stmt_list ();
781 ni = tsi_start (new_tree);
782 oi = tsi_start (*tp);
783 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
784 *tp = new_tree;
785
786 for (; !tsi_end_p (oi); tsi_next (&oi))
787 {
788 tree stmt = tsi_stmt (oi);
789 if (TREE_CODE (stmt) == STATEMENT_LIST)
790 /* This copy is not redundant; tsi_link_after will smash this
791 STATEMENT_LIST into the end of the one we're building, and we
792 don't want to do that with the original. */
793 copy_statement_list (&stmt);
794 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
795 }
796 }
797
798 static void
799 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
800 {
801 tree block = BIND_EXPR_BLOCK (*tp);
802 /* Copy (and replace) the statement. */
803 copy_tree_r (tp, walk_subtrees, NULL);
804 if (block)
805 {
806 remap_block (&block, id);
807 BIND_EXPR_BLOCK (*tp) = block;
808 }
809
810 if (BIND_EXPR_VARS (*tp))
811 /* This will remap a lot of the same decls again, but this should be
812 harmless. */
813 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
814 }
815
816
817 /* Create a new gimple_seq by remapping all the statements in BODY
818 using the inlining information in ID. */
819
820 static gimple_seq
821 remap_gimple_seq (gimple_seq body, copy_body_data *id)
822 {
823 gimple_stmt_iterator si;
824 gimple_seq new_body = NULL;
825
826 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
827 {
828 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
829 gimple_seq_add_seq (&new_body, new_stmts);
830 }
831
832 return new_body;
833 }
834
835
836 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
837 block using the mapping information in ID. */
838
839 static gimple *
840 copy_gimple_bind (gbind *stmt, copy_body_data *id)
841 {
842 gimple *new_bind;
843 tree new_block, new_vars;
844 gimple_seq body, new_body;
845
846 /* Copy the statement. Note that we purposely don't use copy_stmt
847 here because we need to remap statements as we copy. */
848 body = gimple_bind_body (stmt);
849 new_body = remap_gimple_seq (body, id);
850
851 new_block = gimple_bind_block (stmt);
852 if (new_block)
853 remap_block (&new_block, id);
854
855 /* This will remap a lot of the same decls again, but this should be
856 harmless. */
857 new_vars = gimple_bind_vars (stmt);
858 if (new_vars)
859 new_vars = remap_decls (new_vars, NULL, id);
860
861 new_bind = gimple_build_bind (new_vars, new_body, new_block);
862
863 return new_bind;
864 }
865
866 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
867
868 static bool
869 is_parm (tree decl)
870 {
871 if (TREE_CODE (decl) == SSA_NAME)
872 {
873 decl = SSA_NAME_VAR (decl);
874 if (!decl)
875 return false;
876 }
877
878 return (TREE_CODE (decl) == PARM_DECL);
879 }
880
881 /* Remap the dependence CLIQUE from the source to the destination function
882 as specified in ID. */
883
884 static unsigned short
885 remap_dependence_clique (copy_body_data *id, unsigned short clique)
886 {
887 if (clique == 0 || processing_debug_stmt)
888 return 0;
889 if (!id->dependence_map)
890 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
891 bool existed;
892 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
893 if (!existed)
894 newc = ++cfun->last_clique;
895 return newc;
896 }
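/* E.g. (numbers illustrative): if several MEM_REFs in the source function
   carried dependence clique 3, the first one copied allocates a fresh
   clique number in the destination (say cfun->last_clique becomes 7) and
   every later reference with clique 3 is rewritten to 7 as well, so the
   grouping of accesses that were analysed together is preserved in the
   copy.  */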
897
898 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
899 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
900 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
901 recursing into the child nodes of *TP. */
902
903 static tree
904 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
905 {
906 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
907 copy_body_data *id = (copy_body_data *) wi_p->info;
908 tree fn = id->src_fn;
909
910 /* For recursive invocations this is no longer the LHS itself. */
911 bool is_lhs = wi_p->is_lhs;
912 wi_p->is_lhs = false;
913
914 if (TREE_CODE (*tp) == SSA_NAME)
915 {
916 *tp = remap_ssa_name (*tp, id);
917 *walk_subtrees = 0;
918 if (is_lhs)
919 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
920 return NULL;
921 }
922 else if (auto_var_in_fn_p (*tp, fn))
923 {
924 /* Local variables and labels need to be replaced by equivalent
925 variables. We don't want to copy static variables; there's
926 only one of those, no matter how many times we inline the
927 containing function. Similarly for globals from an outer
928 function. */
929 tree new_decl;
930
931 /* Remap the declaration. */
932 new_decl = remap_decl (*tp, id);
933 gcc_assert (new_decl);
934 /* Replace this variable with the copy. */
935 STRIP_TYPE_NOPS (new_decl);
936 /* ??? The C++ frontend uses void * pointer zero to initialize
937 any other type. This confuses the middle-end type verification.
938 As cloned bodies do not go through gimplification again, the fixup
939 there doesn't trigger. */
940 if (TREE_CODE (new_decl) == INTEGER_CST
941 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
942 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
943 *tp = new_decl;
944 *walk_subtrees = 0;
945 }
946 else if (TREE_CODE (*tp) == STATEMENT_LIST)
947 gcc_unreachable ();
948 else if (TREE_CODE (*tp) == SAVE_EXPR)
949 gcc_unreachable ();
950 else if (TREE_CODE (*tp) == LABEL_DECL
951 && (!DECL_CONTEXT (*tp)
952 || decl_function_context (*tp) == id->src_fn))
953 /* These may need to be remapped for EH handling. */
954 *tp = remap_decl (*tp, id);
955 else if (TREE_CODE (*tp) == FIELD_DECL)
956 {
957 /* If the enclosing record type is variably_modified_type_p, the field
958 has already been remapped. Otherwise, it need not be. */
959 tree *n = id->decl_map->get (*tp);
960 if (n)
961 *tp = *n;
962 *walk_subtrees = 0;
963 }
964 else if (TYPE_P (*tp))
965 /* Types may need remapping as well. */
966 *tp = remap_type (*tp, id);
967 else if (CONSTANT_CLASS_P (*tp))
968 {
969 /* If this is a constant, we have to copy the node iff the type
970 will be remapped. copy_tree_r will not copy a constant. */
971 tree new_type = remap_type (TREE_TYPE (*tp), id);
972
973 if (new_type == TREE_TYPE (*tp))
974 *walk_subtrees = 0;
975
976 else if (TREE_CODE (*tp) == INTEGER_CST)
977 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
978 else
979 {
980 *tp = copy_node (*tp);
981 TREE_TYPE (*tp) = new_type;
982 }
983 }
984 else
985 {
986 /* Otherwise, just copy the node. Note that copy_tree_r already
987 knows not to copy VAR_DECLs, etc., so this is safe. */
988
989 if (TREE_CODE (*tp) == MEM_REF)
990 {
991 /* We need to re-canonicalize MEM_REFs from inline substitutions
992 that can happen when a pointer argument is an ADDR_EXPR.
993 Recurse here manually to allow that. */
994 tree ptr = TREE_OPERAND (*tp, 0);
995 tree type = remap_type (TREE_TYPE (*tp), id);
996 tree old = *tp;
997 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
998 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
999 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1000 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1001 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1002 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1003 {
1004 MR_DEPENDENCE_CLIQUE (*tp)
1005 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1006 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1007 }
1008 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1009 remapped a parameter as the property might be valid only
1010 for the parameter itself. */
1011 if (TREE_THIS_NOTRAP (old)
1012 && (!is_parm (TREE_OPERAND (old, 0))
1013 || (!id->transform_parameter && is_parm (ptr))))
1014 TREE_THIS_NOTRAP (*tp) = 1;
1015 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1016 *walk_subtrees = 0;
1017 return NULL;
1018 }
1019
1020 /* Here is the "usual case". Copy this tree node, and then
1021 tweak some special cases. */
1022 copy_tree_r (tp, walk_subtrees, NULL);
1023
1024 if (TREE_CODE (*tp) != OMP_CLAUSE)
1025 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1026
1027 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1028 {
1029 /* The copied TARGET_EXPR has never been expanded, even if the
1030 original node was expanded already. */
1031 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1032 TREE_OPERAND (*tp, 3) = NULL_TREE;
1033 }
1034 else if (TREE_CODE (*tp) == ADDR_EXPR)
1035 {
1036 /* Variable substitution need not be simple. In particular,
1037 the MEM_REF substitution above. Make sure that
1038 TREE_CONSTANT and friends are up-to-date. */
1039 int invariant = is_gimple_min_invariant (*tp);
1040 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1041 recompute_tree_invariant_for_addr_expr (*tp);
1042
1043 /* If this used to be invariant, but is not any longer,
1044 then regimplification is probably needed. */
1045 if (invariant && !is_gimple_min_invariant (*tp))
1046 id->regimplify = true;
1047
1048 *walk_subtrees = 0;
1049 }
1050 }
1051
1052 /* Update the TREE_BLOCK for the cloned expr. */
1053 if (EXPR_P (*tp))
1054 {
1055 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1056 tree old_block = TREE_BLOCK (*tp);
1057 if (old_block)
1058 {
1059 tree *n;
1060 n = id->decl_map->get (TREE_BLOCK (*tp));
1061 if (n)
1062 new_block = *n;
1063 }
1064 TREE_SET_BLOCK (*tp, new_block);
1065 }
1066
1067 /* Keep iterating. */
1068 return NULL_TREE;
1069 }
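/* A hedged example of the MEM_REF re-canonicalization above: when the
   caller passes '&x' for a pointer parameter 'p', substitution turns a
   callee access 'MEM[(int *)p]' into 'MEM[(int *)&x]'.  Rebuilding the
   reference with fold_build2 gives fold a chance to simplify such
   address-of forms back into canonical references instead of keeping the
   raw substitution verbatim.  */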
1070
1071
1072 /* Called from copy_body via walk_tree. DATA is really a
1073 `copy_body_data *'. */
1074
1075 tree
1076 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1077 {
1078 copy_body_data *id = (copy_body_data *) data;
1079 tree fn = id->src_fn;
1080 tree new_block;
1081
1082 /* Begin by recognizing trees that we'll completely rewrite for the
1083 inlining context. Our output for these trees is completely
1084 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1085 into an edge). Further down, we'll handle trees that get
1086 duplicated and/or tweaked. */
1087
1088 /* When requested, RETURN_EXPRs should be transformed to just the
1089 contained MODIFY_EXPR. The branch semantics of the return will
1090 be handled elsewhere by manipulating the CFG rather than a statement. */
1091 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1092 {
1093 tree assignment = TREE_OPERAND (*tp, 0);
1094
1095 /* If we're returning something, just turn that into an
1096 assignment into the equivalent of the original RESULT_DECL.
1097 If the "assignment" is just the result decl, the result
1098 decl has already been set (e.g. a recent "foo (&result_decl,
1099 ...)"); just toss the entire RETURN_EXPR. */
1100 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1101 {
1102 /* Replace the RETURN_EXPR with (a copy of) the
1103 MODIFY_EXPR hanging underneath. */
1104 *tp = copy_node (assignment);
1105 }
1106 else /* Else the RETURN_EXPR returns no value. */
1107 {
1108 *tp = NULL;
1109 return (tree) (void *)1;
1110 }
1111 }
1112 else if (TREE_CODE (*tp) == SSA_NAME)
1113 {
1114 *tp = remap_ssa_name (*tp, id);
1115 *walk_subtrees = 0;
1116 return NULL;
1117 }
1118
1119 /* Local variables and labels need to be replaced by equivalent
1120 variables. We don't want to copy static variables; there's only
1121 one of those, no matter how many times we inline the containing
1122 function. Similarly for globals from an outer function. */
1123 else if (auto_var_in_fn_p (*tp, fn))
1124 {
1125 tree new_decl;
1126
1127 /* Remap the declaration. */
1128 new_decl = remap_decl (*tp, id);
1129 gcc_assert (new_decl);
1130 /* Replace this variable with the copy. */
1131 STRIP_TYPE_NOPS (new_decl);
1132 *tp = new_decl;
1133 *walk_subtrees = 0;
1134 }
1135 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1136 copy_statement_list (tp);
1137 else if (TREE_CODE (*tp) == SAVE_EXPR
1138 || TREE_CODE (*tp) == TARGET_EXPR)
1139 remap_save_expr (tp, id->decl_map, walk_subtrees);
1140 else if (TREE_CODE (*tp) == LABEL_DECL
1141 && (! DECL_CONTEXT (*tp)
1142 || decl_function_context (*tp) == id->src_fn))
1143 /* These may need to be remapped for EH handling. */
1144 *tp = remap_decl (*tp, id);
1145 else if (TREE_CODE (*tp) == BIND_EXPR)
1146 copy_bind_expr (tp, walk_subtrees, id);
1147 /* Types may need remapping as well. */
1148 else if (TYPE_P (*tp))
1149 *tp = remap_type (*tp, id);
1150
1151 /* If this is a constant, we have to copy the node iff the type will be
1152 remapped. copy_tree_r will not copy a constant. */
1153 else if (CONSTANT_CLASS_P (*tp))
1154 {
1155 tree new_type = remap_type (TREE_TYPE (*tp), id);
1156
1157 if (new_type == TREE_TYPE (*tp))
1158 *walk_subtrees = 0;
1159
1160 else if (TREE_CODE (*tp) == INTEGER_CST)
1161 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1162 else
1163 {
1164 *tp = copy_node (*tp);
1165 TREE_TYPE (*tp) = new_type;
1166 }
1167 }
1168
1169 /* Otherwise, just copy the node. Note that copy_tree_r already
1170 knows not to copy VAR_DECLs, etc., so this is safe. */
1171 else
1172 {
1173 /* Here we handle trees that are not completely rewritten.
1174 First we detect some inlining-induced bogosities for
1175 discarding. */
1176 if (TREE_CODE (*tp) == MODIFY_EXPR
1177 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1178 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1179 {
1180 /* Some assignments VAR = VAR; don't generate any rtl code
1181 and thus don't count as variable modification. Avoid
1182 keeping bogosities like 0 = 0. */
1183 tree decl = TREE_OPERAND (*tp, 0), value;
1184 tree *n;
1185
1186 n = id->decl_map->get (decl);
1187 if (n)
1188 {
1189 value = *n;
1190 STRIP_TYPE_NOPS (value);
1191 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1192 {
1193 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1194 return copy_tree_body_r (tp, walk_subtrees, data);
1195 }
1196 }
1197 }
1198 else if (TREE_CODE (*tp) == INDIRECT_REF)
1199 {
1200 /* Get rid of *& from inline substitutions that can happen when a
1201 pointer argument is an ADDR_EXPR. */
1202 tree decl = TREE_OPERAND (*tp, 0);
1203 tree *n = id->decl_map->get (decl);
1204 if (n)
1205 {
1206 /* If we happen to get an ADDR_EXPR in n->value, strip
1207 it manually here as we'll eventually get ADDR_EXPRs
1208 which lie about their pointed-to types. In this case
1209 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1210 but we absolutely rely on that. As fold_indirect_ref
1211 does other useful transformations, try that first, though. */
1212 tree type = TREE_TYPE (*tp);
1213 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1214 tree old = *tp;
1215 *tp = gimple_fold_indirect_ref (ptr);
1216 if (! *tp)
1217 {
1218 type = remap_type (type, id);
1219 if (TREE_CODE (ptr) == ADDR_EXPR)
1220 {
1221 *tp
1222 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1223 /* ??? We should either assert here or build
1224 a VIEW_CONVERT_EXPR instead of blindly leaking
1225 incompatible types to our IL. */
1226 if (! *tp)
1227 *tp = TREE_OPERAND (ptr, 0);
1228 }
1229 else
1230 {
1231 *tp = build1 (INDIRECT_REF, type, ptr);
1232 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1233 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1234 TREE_READONLY (*tp) = TREE_READONLY (old);
1235 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1236 have remapped a parameter as the property might be
1237 valid only for the parameter itself. */
1238 if (TREE_THIS_NOTRAP (old)
1239 && (!is_parm (TREE_OPERAND (old, 0))
1240 || (!id->transform_parameter && is_parm (ptr))))
1241 TREE_THIS_NOTRAP (*tp) = 1;
1242 }
1243 }
1244 *walk_subtrees = 0;
1245 return NULL;
1246 }
1247 }
1248 else if (TREE_CODE (*tp) == MEM_REF)
1249 {
1250 /* We need to re-canonicalize MEM_REFs from inline substitutions
1251 that can happen when a pointer argument is an ADDR_EXPR.
1252 Recurse here manually to allow that. */
1253 tree ptr = TREE_OPERAND (*tp, 0);
1254 tree type = remap_type (TREE_TYPE (*tp), id);
1255 tree old = *tp;
1256 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1257 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1258 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1259 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1260 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1261 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1262 {
1263 MR_DEPENDENCE_CLIQUE (*tp)
1264 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1265 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1266 }
1267 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1268 remapped a parameter as the property might be valid only
1269 for the parameter itself. */
1270 if (TREE_THIS_NOTRAP (old)
1271 && (!is_parm (TREE_OPERAND (old, 0))
1272 || (!id->transform_parameter && is_parm (ptr))))
1273 TREE_THIS_NOTRAP (*tp) = 1;
1274 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1275 *walk_subtrees = 0;
1276 return NULL;
1277 }
1278
1279 /* Here is the "usual case". Copy this tree node, and then
1280 tweak some special cases. */
1281 copy_tree_r (tp, walk_subtrees, NULL);
1282
1283 /* If EXPR has a block defined, map it to the newly constructed block.
1284 When inlining we want EXPRs without a block to appear in the block
1285 of the function call if we are not remapping a type. */
1286 if (EXPR_P (*tp))
1287 {
1288 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1289 if (TREE_BLOCK (*tp))
1290 {
1291 tree *n;
1292 n = id->decl_map->get (TREE_BLOCK (*tp));
1293 if (n)
1294 new_block = *n;
1295 }
1296 TREE_SET_BLOCK (*tp, new_block);
1297 }
1298
1299 if (TREE_CODE (*tp) != OMP_CLAUSE)
1300 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1301
1302 /* The copied TARGET_EXPR has never been expanded, even if the
1303 original node was expanded already. */
1304 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1305 {
1306 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1307 TREE_OPERAND (*tp, 3) = NULL_TREE;
1308 }
1309
1310 /* Variable substitution need not be simple. In particular, the
1311 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1312 and friends are up-to-date. */
1313 else if (TREE_CODE (*tp) == ADDR_EXPR)
1314 {
1315 int invariant = is_gimple_min_invariant (*tp);
1316 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1317
1318 /* Handle the case where we substituted an INDIRECT_REF
1319 into the operand of the ADDR_EXPR. */
1320 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1321 {
1322 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1323 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1324 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1325 *tp = t;
1326 }
1327 else
1328 recompute_tree_invariant_for_addr_expr (*tp);
1329
1330 /* If this used to be invariant, but is not any longer,
1331 then regimplification is probably needed. */
1332 if (invariant && !is_gimple_min_invariant (*tp))
1333 id->regimplify = true;
1334
1335 *walk_subtrees = 0;
1336 }
1337 }
1338
1339 /* Keep iterating. */
1340 return NULL_TREE;
1341 }
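/* A hedged example of the INDIRECT_REF cleanup in the function above:
   inlining 'void set (int *p) { *p = 1; }' at a call 'set (&v)' maps 'p'
   to '&v', so the copied body momentarily contains '*&v';
   gimple_fold_indirect_ref collapses that to plain 'v' instead of leaving
   a pointless dereference of an address-of expression.  */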
1342
1343 /* Helper for remap_gimple_stmt. Given an EH region number for the
1344 source function, map that to the duplicate EH region number in
1345 the destination function. */
1346
1347 static int
1348 remap_eh_region_nr (int old_nr, copy_body_data *id)
1349 {
1350 eh_region old_r, new_r;
1351
1352 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1353 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1354
1355 return new_r->index;
1356 }
1357
1358 /* Similar, but operate on INTEGER_CSTs. */
1359
1360 static tree
1361 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1362 {
1363 int old_nr, new_nr;
1364
1365 old_nr = tree_to_shwi (old_t_nr);
1366 new_nr = remap_eh_region_nr (old_nr, id);
1367
1368 return build_int_cst (integer_type_node, new_nr);
1369 }
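/* E.g. (region numbers illustrative): a '__builtin_eh_pointer (2)' call
   copied out of the source function must refer to the duplicate of EH
   region 2 in the destination function, so its argument is rewritten via
   remap_eh_region_nr using the eh_map built when the regions were
   duplicated.  */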
1370
1371 /* Helper for copy_bb. Remap statement STMT using the inlining
1372 information in ID. Return the new statement copy. */
1373
1374 static gimple_seq
1375 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1376 {
1377 gimple *copy = NULL;
1378 struct walk_stmt_info wi;
1379 bool skip_first = false;
1380 gimple_seq stmts = NULL;
1381
1382 if (is_gimple_debug (stmt)
1383 && (gimple_debug_nonbind_marker_p (stmt)
1384 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1385 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1386 return NULL;
1387
1388 /* Begin by recognizing trees that we'll completely rewrite for the
1389 inlining context. Our output for these trees is completely
1390 different from our input (e.g. RETURN_EXPR is deleted and morphs
1391 into an edge). Further down, we'll handle trees that get
1392 duplicated and/or tweaked. */
1393
1394 /* When requested, GIMPLE_RETURN should be transformed to just the
1395 contained GIMPLE_ASSIGN. The branch semantics of the return will
1396 be handled elsewhere by manipulating the CFG rather than the
1397 statement. */
1398 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1399 {
1400 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1401
1402 /* If we're returning something, just turn that into an
1403 assignment to the equivalent of the original RESULT_DECL.
1404 If RETVAL is just the result decl, the result decl has
1405 already been set (e.g. a recent "foo (&result_decl, ...)");
1406 just toss the entire GIMPLE_RETURN. */
1407 if (retval
1408 && (TREE_CODE (retval) != RESULT_DECL
1409 && (TREE_CODE (retval) != SSA_NAME
1410 || ! SSA_NAME_VAR (retval)
1411 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1412 {
1413 copy = gimple_build_assign (id->do_not_unshare
1414 ? id->retvar : unshare_expr (id->retvar),
1415 retval);
1416 /* id->retvar is already substituted. Skip it on later remapping. */
1417 skip_first = true;
1418 }
1419 else
1420 return NULL;
1421 }
1422 else if (gimple_has_substatements (stmt))
1423 {
1424 gimple_seq s1, s2;
1425
1426 /* When cloning bodies from the C++ front end, we will be handed bodies
1427 in High GIMPLE form. Handle here all the High GIMPLE statements that
1428 have embedded statements. */
1429 switch (gimple_code (stmt))
1430 {
1431 case GIMPLE_BIND:
1432 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1433 break;
1434
1435 case GIMPLE_CATCH:
1436 {
1437 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1438 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1439 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1440 }
1441 break;
1442
1443 case GIMPLE_EH_FILTER:
1444 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1445 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1446 break;
1447
1448 case GIMPLE_TRY:
1449 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1450 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1451 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1452 break;
1453
1454 case GIMPLE_WITH_CLEANUP_EXPR:
1455 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1456 copy = gimple_build_wce (s1);
1457 break;
1458
1459 case GIMPLE_OMP_PARALLEL:
1460 {
1461 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1462 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1463 copy = gimple_build_omp_parallel
1464 (s1,
1465 gimple_omp_parallel_clauses (omp_par_stmt),
1466 gimple_omp_parallel_child_fn (omp_par_stmt),
1467 gimple_omp_parallel_data_arg (omp_par_stmt));
1468 }
1469 break;
1470
1471 case GIMPLE_OMP_TASK:
1472 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1473 copy = gimple_build_omp_task
1474 (s1,
1475 gimple_omp_task_clauses (stmt),
1476 gimple_omp_task_child_fn (stmt),
1477 gimple_omp_task_data_arg (stmt),
1478 gimple_omp_task_copy_fn (stmt),
1479 gimple_omp_task_arg_size (stmt),
1480 gimple_omp_task_arg_align (stmt));
1481 break;
1482
1483 case GIMPLE_OMP_FOR:
1484 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1485 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1486 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1487 gimple_omp_for_clauses (stmt),
1488 gimple_omp_for_collapse (stmt), s2);
1489 {
1490 size_t i;
1491 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1492 {
1493 gimple_omp_for_set_index (copy, i,
1494 gimple_omp_for_index (stmt, i));
1495 gimple_omp_for_set_initial (copy, i,
1496 gimple_omp_for_initial (stmt, i));
1497 gimple_omp_for_set_final (copy, i,
1498 gimple_omp_for_final (stmt, i));
1499 gimple_omp_for_set_incr (copy, i,
1500 gimple_omp_for_incr (stmt, i));
1501 gimple_omp_for_set_cond (copy, i,
1502 gimple_omp_for_cond (stmt, i));
1503 }
1504 }
1505 break;
1506
1507 case GIMPLE_OMP_MASTER:
1508 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1509 copy = gimple_build_omp_master (s1);
1510 break;
1511
1512 case GIMPLE_OMP_TASKGROUP:
1513 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1514 copy = gimple_build_omp_taskgroup (s1);
1515 break;
1516
1517 case GIMPLE_OMP_ORDERED:
1518 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1519 copy = gimple_build_omp_ordered
1520 (s1,
1521 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1522 break;
1523
1524 case GIMPLE_OMP_SECTION:
1525 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1526 copy = gimple_build_omp_section (s1);
1527 break;
1528
1529 case GIMPLE_OMP_SECTIONS:
1530 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1531 copy = gimple_build_omp_sections
1532 (s1, gimple_omp_sections_clauses (stmt));
1533 break;
1534
1535 case GIMPLE_OMP_SINGLE:
1536 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1537 copy = gimple_build_omp_single
1538 (s1, gimple_omp_single_clauses (stmt));
1539 break;
1540
1541 case GIMPLE_OMP_TARGET:
1542 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1543 copy = gimple_build_omp_target
1544 (s1, gimple_omp_target_kind (stmt),
1545 gimple_omp_target_clauses (stmt));
1546 break;
1547
1548 case GIMPLE_OMP_TEAMS:
1549 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1550 copy = gimple_build_omp_teams
1551 (s1, gimple_omp_teams_clauses (stmt));
1552 break;
1553
1554 case GIMPLE_OMP_CRITICAL:
1555 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1556 copy = gimple_build_omp_critical (s1,
1557 gimple_omp_critical_name
1558 (as_a <gomp_critical *> (stmt)),
1559 gimple_omp_critical_clauses
1560 (as_a <gomp_critical *> (stmt)));
1561 break;
1562
1563 case GIMPLE_TRANSACTION:
1564 {
1565 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1566 gtransaction *new_trans_stmt;
1567 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1568 id);
1569 copy = new_trans_stmt = gimple_build_transaction (s1);
1570 gimple_transaction_set_subcode (new_trans_stmt,
1571 gimple_transaction_subcode (old_trans_stmt));
1572 gimple_transaction_set_label_norm (new_trans_stmt,
1573 gimple_transaction_label_norm (old_trans_stmt));
1574 gimple_transaction_set_label_uninst (new_trans_stmt,
1575 gimple_transaction_label_uninst (old_trans_stmt));
1576 gimple_transaction_set_label_over (new_trans_stmt,
1577 gimple_transaction_label_over (old_trans_stmt));
1578 }
1579 break;
1580
1581 default:
1582 gcc_unreachable ();
1583 }
1584 }
1585 else
1586 {
1587 if (gimple_assign_copy_p (stmt)
1588 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1589 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1590 {
1591 /* Here we handle statements that are not completely rewritten.
1592 First we detect some inlining-induced bogosities for
1593 discarding. */
1594
1595 /* Some assignments VAR = VAR; don't generate any rtl code
1596 and thus don't count as variable modification. Avoid
1597 keeping bogosities like 0 = 0. */
1598 tree decl = gimple_assign_lhs (stmt), value;
1599 tree *n;
1600
1601 n = id->decl_map->get (decl);
1602 if (n)
1603 {
1604 value = *n;
1605 STRIP_TYPE_NOPS (value);
1606 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1607 return NULL;
1608 }
1609 }
1610
1611 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1612 in a block that we aren't copying during tree_function_versioning,
1613 just drop the clobber stmt. */
1614 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1615 {
1616 tree lhs = gimple_assign_lhs (stmt);
1617 if (TREE_CODE (lhs) == MEM_REF
1618 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1619 {
1620 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1621 if (gimple_bb (def_stmt)
1622 && !bitmap_bit_p (id->blocks_to_copy,
1623 gimple_bb (def_stmt)->index))
1624 return NULL;
1625 }
1626 }
1627
1628 if (gimple_debug_bind_p (stmt))
1629 {
1630 gdebug *copy
1631 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1632 gimple_debug_bind_get_value (stmt),
1633 stmt);
1634 if (id->reset_location)
1635 gimple_set_location (copy, input_location);
1636 id->debug_stmts.safe_push (copy);
1637 gimple_seq_add_stmt (&stmts, copy);
1638 return stmts;
1639 }
1640 if (gimple_debug_source_bind_p (stmt))
1641 {
1642 gdebug *copy = gimple_build_debug_source_bind
1643 (gimple_debug_source_bind_get_var (stmt),
1644 gimple_debug_source_bind_get_value (stmt),
1645 stmt);
1646 if (id->reset_location)
1647 gimple_set_location (copy, input_location);
1648 id->debug_stmts.safe_push (copy);
1649 gimple_seq_add_stmt (&stmts, copy);
1650 return stmts;
1651 }
1652 if (gimple_debug_nonbind_marker_p (stmt))
1653 {
1654 /* If the inlined function has too many debug markers,
1655 don't copy them. */
1656 if (id->src_cfun->debug_marker_count
1657 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1658 return stmts;
1659
1660 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1661 if (id->reset_location)
1662 gimple_set_location (copy, input_location);
1663 id->debug_stmts.safe_push (copy);
1664 gimple_seq_add_stmt (&stmts, copy);
1665 return stmts;
1666 }
1667
1668 /* Create a new deep copy of the statement. */
1669 copy = gimple_copy (stmt);
1670
1671 /* Clear flags that need revisiting. */
1672 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1673 {
1674 if (gimple_call_tail_p (call_stmt))
1675 gimple_call_set_tail (call_stmt, false);
1676 if (gimple_call_from_thunk_p (call_stmt))
1677 gimple_call_set_from_thunk (call_stmt, false);
1678 if (gimple_call_internal_p (call_stmt))
1679 switch (gimple_call_internal_fn (call_stmt))
1680 {
1681 case IFN_GOMP_SIMD_LANE:
1682 case IFN_GOMP_SIMD_VF:
1683 case IFN_GOMP_SIMD_LAST_LANE:
1684 case IFN_GOMP_SIMD_ORDERED_START:
1685 case IFN_GOMP_SIMD_ORDERED_END:
1686 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1687 break;
1688 default:
1689 break;
1690 }
1691 }
1692
1693 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1694 RESX and EH_DISPATCH. */
1695 if (id->eh_map)
1696 switch (gimple_code (copy))
1697 {
1698 case GIMPLE_CALL:
1699 {
1700 tree r, fndecl = gimple_call_fndecl (copy);
1701 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1702 switch (DECL_FUNCTION_CODE (fndecl))
1703 {
1704 case BUILT_IN_EH_COPY_VALUES:
1705 r = gimple_call_arg (copy, 1);
1706 r = remap_eh_region_tree_nr (r, id);
1707 gimple_call_set_arg (copy, 1, r);
1708 /* FALLTHRU */
1709
1710 case BUILT_IN_EH_POINTER:
1711 case BUILT_IN_EH_FILTER:
1712 r = gimple_call_arg (copy, 0);
1713 r = remap_eh_region_tree_nr (r, id);
1714 gimple_call_set_arg (copy, 0, r);
1715 break;
1716
1717 default:
1718 break;
1719 }
1720
1721 /* Reset alias info if we didn't apply measures to
1722 keep it valid over inlining by setting DECL_PT_UID. */
1723 if (!id->src_cfun->gimple_df
1724 || !id->src_cfun->gimple_df->ipa_pta)
1725 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1726 }
1727 break;
1728
1729 case GIMPLE_RESX:
1730 {
1731 gresx *resx_stmt = as_a <gresx *> (copy);
1732 int r = gimple_resx_region (resx_stmt);
1733 r = remap_eh_region_nr (r, id);
1734 gimple_resx_set_region (resx_stmt, r);
1735 }
1736 break;
1737
1738 case GIMPLE_EH_DISPATCH:
1739 {
1740 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1741 int r = gimple_eh_dispatch_region (eh_dispatch);
1742 r = remap_eh_region_nr (r, id);
1743 gimple_eh_dispatch_set_region (eh_dispatch, r);
1744 }
1745 break;
1746
1747 default:
1748 break;
1749 }
1750 }
1751
1752 /* If STMT has a block defined, map it to the newly constructed block. */
1753 if (gimple_block (copy))
1754 {
1755 tree *n;
1756 n = id->decl_map->get (gimple_block (copy));
1757 gcc_assert (n);
1758 gimple_set_block (copy, *n);
1759 }
1760
1761 if (id->reset_location)
1762 gimple_set_location (copy, input_location);
1763
1764 /* Debug statements ought to be rebuilt and not copied. */
1765 gcc_checking_assert (!is_gimple_debug (copy));
1766
1767 /* Remap all the operands in COPY. */
1768 memset (&wi, 0, sizeof (wi));
1769 wi.info = id;
1770 if (skip_first)
1771 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1772 else
1773 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1774
1775 /* Clear the copied virtual operands. We are not remapping them here
1776 but are going to recreate them from scratch. */
1777 if (gimple_has_mem_ops (copy))
1778 {
1779 gimple_set_vdef (copy, NULL_TREE);
1780 gimple_set_vuse (copy, NULL_TREE);
1781 }
1782
1783 gimple_seq_add_stmt (&stmts, copy);
1784 return stmts;
1785 }
1786
1787
1788 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1789 later. */
1790
1791 static basic_block
1792 copy_bb (copy_body_data *id, basic_block bb,
1793 profile_count num, profile_count den)
1794 {
1795 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1796 basic_block copy_basic_block;
1797 tree decl;
1798 basic_block prev;
1799
1800 profile_count::adjust_for_ipa_scaling (&num, &den);
1801
1802 /* Search for previous copied basic block. */
1803 prev = bb->prev_bb;
1804 while (!prev->aux)
1805 prev = prev->prev_bb;
1806
1807 /* create_basic_block() will append every new block to
1808 basic_block_info automatically. */
1809 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1810 copy_basic_block->count = bb->count.apply_scale (num, den);
1811
1812 copy_gsi = gsi_start_bb (copy_basic_block);
1813
1814 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1815 {
1816 gimple_seq stmts;
1817 gimple *stmt = gsi_stmt (gsi);
1818 gimple *orig_stmt = stmt;
1819 gimple_stmt_iterator stmts_gsi;
1820 bool stmt_added = false;
1821
1822 id->regimplify = false;
1823 stmts = remap_gimple_stmt (stmt, id);
1824
1825 if (gimple_seq_empty_p (stmts))
1826 continue;
1827
1828 seq_gsi = copy_gsi;
1829
1830 for (stmts_gsi = gsi_start (stmts);
1831 !gsi_end_p (stmts_gsi); )
1832 {
1833 stmt = gsi_stmt (stmts_gsi);
1834
1835 /* Advance iterator now before stmt is moved to seq_gsi. */
1836 gsi_next (&stmts_gsi);
1837
1838 if (gimple_nop_p (stmt))
1839 continue;
1840
1841 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1842 orig_stmt);
1843
1844 /* With return slot optimization we can end up with
1845 non-gimple (foo *)&this->m, fix that here. */
1846 if (is_gimple_assign (stmt)
1847 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1848 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1849 {
1850 tree new_rhs;
1851 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1852 gimple_assign_rhs1 (stmt),
1853 true, NULL, false,
1854 GSI_CONTINUE_LINKING);
1855 gimple_assign_set_rhs1 (stmt, new_rhs);
1856 id->regimplify = false;
1857 }
1858
1859 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1860
1861 if (id->regimplify)
1862 gimple_regimplify_operands (stmt, &seq_gsi);
1863
1864 stmt_added = true;
1865 }
1866
1867 if (!stmt_added)
1868 continue;
1869
1870 /* If copy_basic_block was empty at the start of this iteration,
1871 call gsi_start_bb again to get at the newly added statements. */
1872 if (gsi_end_p (copy_gsi))
1873 copy_gsi = gsi_start_bb (copy_basic_block);
1874 else
1875 gsi_next (&copy_gsi);
1876
1877 /* Process the new statement. The call to gimple_regimplify_operands
1878 possibly turned the statement into multiple statements; we
1879 need to process all of them. */
1880 do
1881 {
1882 tree fn;
1883 gcall *call_stmt;
1884
1885 stmt = gsi_stmt (copy_gsi);
1886 call_stmt = dyn_cast <gcall *> (stmt);
1887 if (call_stmt
1888 && gimple_call_va_arg_pack_p (call_stmt)
1889 && id->call_stmt
1890 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1891 {
1892 /* __builtin_va_arg_pack () should be replaced by
1893 all arguments corresponding to ... in the caller. */
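/* Illustrative sketch (hypothetical functions f and g): given the
   always_inline wrapper
   int f (int x, ...) { return g (x, __builtin_va_arg_pack ()); }
   inlined at a call f (1, a, b), the inner call is rebuilt below as
   g (1, a, b);
   i.e. the pack is replaced by the caller's anonymous arguments. */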
1894 tree p;
1895 gcall *new_call;
1896 vec<tree> argarray;
1897 size_t nargs = gimple_call_num_args (id->call_stmt);
1898 size_t n;
1899
1900 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1901 nargs--;
1902
1903 /* Create the new array of arguments. */
1904 n = nargs + gimple_call_num_args (call_stmt);
1905 argarray.create (n);
1906 argarray.safe_grow_cleared (n);
1907
1908 /* Copy all the arguments before '...' */
1909 memcpy (argarray.address (),
1910 gimple_call_arg_ptr (call_stmt, 0),
1911 gimple_call_num_args (call_stmt) * sizeof (tree));
1912
1913 /* Append the arguments passed in '...' */
1914 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1915 gimple_call_arg_ptr (id->call_stmt, 0)
1916 + (gimple_call_num_args (id->call_stmt) - nargs),
1917 nargs * sizeof (tree));
1918
1919 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1920 argarray);
1921
1922 argarray.release ();
1923
1924 /* Copy all GIMPLE_CALL flags, location and block, except
1925 GF_CALL_VA_ARG_PACK. */
1926 gimple_call_copy_flags (new_call, call_stmt);
1927 gimple_call_set_va_arg_pack (new_call, false);
1928 gimple_set_location (new_call, gimple_location (stmt));
1929 gimple_set_block (new_call, gimple_block (stmt));
1930 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1931
1932 gsi_replace (&copy_gsi, new_call, false);
1933 stmt = new_call;
1934 }
1935 else if (call_stmt
1936 && id->call_stmt
1937 && (decl = gimple_call_fndecl (stmt))
1938 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1939 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
1940 && ! gimple_call_va_arg_pack_p (id->call_stmt))
1941 {
1942 /* __builtin_va_arg_pack_len () should be replaced by
1943 the number of anonymous arguments. */
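/* Hypothetical example: if the inlined function contains
   size_t n = __builtin_va_arg_pack_len ();
   and the caller's call passes three anonymous arguments, the call is
   replaced below by the constant assignment n = 3. */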
1944 size_t nargs = gimple_call_num_args (id->call_stmt);
1945 tree count, p;
1946 gimple *new_stmt;
1947
1948 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1949 nargs--;
1950
1951 count = build_int_cst (integer_type_node, nargs);
1952 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1953 gsi_replace (&copy_gsi, new_stmt, false);
1954 stmt = new_stmt;
1955 }
1956 else if (call_stmt
1957 && id->call_stmt
1958 && gimple_call_internal_p (stmt)
1959 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1960 {
1961 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1962 gsi_remove (&copy_gsi, false);
1963 continue;
1964 }
1965
1966 /* Statements produced by inlining can be unfolded, especially
1967 when we constant propagated some operands. We can't fold
1968 them right now for two reasons:
1969 1) folding requires SSA_NAME_DEF_STMTs to be correct
1970 2) we can't change function calls to builtins.
1971 So we just mark the statement for later folding. We mark
1972 all new statements, instead of just the statements that have
1973 changed by some nontrivial substitution, so even statements made
1974 foldable indirectly are updated. If this turns out to be
1975 expensive, copy_body can be told to watch for nontrivial
1976 changes. */
1977 if (id->statements_to_fold)
1978 id->statements_to_fold->add (stmt);
1979
1980 /* We're duplicating a CALL_EXPR. Find any corresponding
1981 callgraph edges and update or duplicate them. */
1982 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1983 {
1984 struct cgraph_edge *edge;
1985
1986 switch (id->transform_call_graph_edges)
1987 {
1988 case CB_CGE_DUPLICATE:
1989 edge = id->src_node->get_edge (orig_stmt);
1990 if (edge)
1991 {
1992 struct cgraph_edge *old_edge = edge;
1993 profile_count old_cnt = edge->count;
1994 edge = edge->clone (id->dst_node, call_stmt,
1995 gimple_uid (stmt),
1996 num, den,
1997 true);
1998
1999 /* Speculative calls consist of two edges - direct and
2000 indirect. Duplicate the whole thing and distribute
2001 frequencies accordingly. */
2002 if (edge->speculative)
2003 {
2004 struct cgraph_edge *direct, *indirect;
2005 struct ipa_ref *ref;
2006
2007 gcc_assert (!edge->indirect_unknown_callee);
2008 old_edge->speculative_call_info (direct, indirect, ref);
2009
2010 profile_count indir_cnt = indirect->count;
2011 indirect = indirect->clone (id->dst_node, call_stmt,
2012 gimple_uid (stmt),
2013 num, den,
2014 true);
2015
2016 profile_probability prob
2017 = indir_cnt.probability_in (old_cnt + indir_cnt);
2018 indirect->count
2019 = copy_basic_block->count.apply_probability (prob);
2020 edge->count = copy_basic_block->count - indirect->count;
2021 id->dst_node->clone_reference (ref, stmt);
2022 }
2023 else
2024 edge->count = copy_basic_block->count;
2025 }
2026 break;
2027
2028 case CB_CGE_MOVE_CLONES:
2029 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2030 call_stmt);
2031 edge = id->dst_node->get_edge (stmt);
2032 break;
2033
2034 case CB_CGE_MOVE:
2035 edge = id->dst_node->get_edge (orig_stmt);
2036 if (edge)
2037 edge->set_call_stmt (call_stmt);
2038 break;
2039
2040 default:
2041 gcc_unreachable ();
2042 }
2043
2044 /* Constant propagation on arguments done during inlining
2045 may create a new direct call. Produce an edge for it. */
2046 if ((!edge
2047 || (edge->indirect_inlining_edge
2048 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2049 && id->dst_node->definition
2050 && (fn = gimple_call_fndecl (stmt)) != NULL)
2051 {
2052 struct cgraph_node *dest = cgraph_node::get_create (fn);
2053
2054 /* We have a missing edge in the callgraph. This can happen
2055 when previous inlining turned an indirect call into a
2056 direct call by constant propagating arguments or we are
2057 producing a dead clone (for further cloning). In all
2058 other cases we hit a bug (incorrect node sharing is the
2059 most common reason for missing edges). */
2060 gcc_assert (!dest->definition
2061 || dest->address_taken
2062 || !id->src_node->definition
2063 || !id->dst_node->definition);
2064 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2065 id->dst_node->create_edge_including_clones
2066 (dest, orig_stmt, call_stmt, bb->count,
2067 CIF_ORIGINALLY_INDIRECT_CALL);
2068 else
2069 id->dst_node->create_edge (dest, call_stmt,
2070 bb->count)->inline_failed
2071 = CIF_ORIGINALLY_INDIRECT_CALL;
2072 if (dump_file)
2073 {
2074 fprintf (dump_file, "Created new direct edge to %s\n",
2075 dest->name ());
2076 }
2077 }
2078
2079 notice_special_calls (as_a <gcall *> (stmt));
2080 }
2081
2082 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2083 id->eh_map, id->eh_lp_nr);
2084
2085 gsi_next (&copy_gsi);
2086 }
2087 while (!gsi_end_p (copy_gsi));
2088
2089 copy_gsi = gsi_last_bb (copy_basic_block);
2090 }
2091
2092 return copy_basic_block;
2093 }
2094
2095 /* Inserting a Single Entry Multiple Exit region in SSA form into code that
2096 is already in SSA form is quite easy, since the dominator relationship
2097 for the old basic blocks does not change.
2098 
2099 There is, however, an exception: inlining might change the dominator
2100 relation across EH edges leading from basic blocks within the inlined
2101 function to landing pads in the function we inline into.
2102 
2103 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2104 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2105 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2106 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2107 set, and this means that there will be no overlapping live ranges
2108 for the underlying symbol.
2109 
2110 This might change in the future if we allow redirecting of EH edges;
2111 we might then want to change the way we build the CFG pre-inlining to
2112 include all the possible edges. */
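/* Sketch of the situation handled here (names invented): a landing pad
   LP in the caller may have a PHI such as
   x_3 = PHI <x_1 (E1), x_2 (E2)>
   and inlining adds a new EH edge from a copied block to LP. The loop
   below copies the PHI argument from the corresponding edge leaving
   RET_BB onto the new edge, so the PHI is not left incomplete. */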
2113 static void
2114 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2115 bool can_throw, bool nonlocal_goto)
2116 {
2117 edge e;
2118 edge_iterator ei;
2119
2120 FOR_EACH_EDGE (e, ei, bb->succs)
2121 if (!e->dest->aux
2122 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2123 {
2124 gphi *phi;
2125 gphi_iterator si;
2126
2127 if (!nonlocal_goto)
2128 gcc_assert (e->flags & EDGE_EH);
2129
2130 if (!can_throw)
2131 gcc_assert (!(e->flags & EDGE_EH));
2132
2133 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2134 {
2135 edge re;
2136
2137 phi = si.phi ();
2138
2139 /* For abnormal goto/call edges the receiver can be the
2140 ENTRY_BLOCK. Do not assert this cannot happen. */
2141
2142 gcc_assert ((e->flags & EDGE_EH)
2143 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2144
2145 re = find_edge (ret_bb, e->dest);
2146 gcc_checking_assert (re);
2147 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2148 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2149
2150 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2151 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2152 }
2153 }
2154 }
2155
2156
2157 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2158 accordingly. Assume the aux pointers point to the copies of each BB.
2159 Return true if any debug stmts are left after a statement that must end
2160 the basic block. */
2161
2162 static bool
2163 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2164 basic_block ret_bb, basic_block abnormal_goto_dest,
2165 copy_body_data *id)
2166 {
2167 basic_block new_bb = (basic_block) bb->aux;
2168 edge_iterator ei;
2169 edge old_edge;
2170 gimple_stmt_iterator si;
2171 bool need_debug_cleanup = false;
2172
2173 /* Use the indices from the original blocks to create edges for the
2174 new ones. */
2175 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2176 if (!(old_edge->flags & EDGE_EH))
2177 {
2178 edge new_edge;
2179 int flags = old_edge->flags;
2180 location_t locus = old_edge->goto_locus;
2181
2182 /* Return edges do get a FALLTHRU flag when they get inlined. */
2183 if (old_edge->dest->index == EXIT_BLOCK
2184 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2185 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2186 flags |= EDGE_FALLTHRU;
2187
2188 new_edge
2189 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2190 new_edge->probability = old_edge->probability;
2191 if (!id->reset_location)
2192 new_edge->goto_locus = remap_location (locus, id);
2193 }
2194
2195 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2196 return false;
2197
2198 /* When doing function splitting, we must decrease the count of the return
2199 block, which was previously reachable from a block we did not copy. */
2200 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2201 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2202 if (old_edge->src->index != ENTRY_BLOCK
2203 && !old_edge->src->aux)
2204 new_bb->count -= old_edge->count ().apply_scale (num, den);
2205
2206 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2207 {
2208 gimple *copy_stmt;
2209 bool can_throw, nonlocal_goto;
2210
2211 copy_stmt = gsi_stmt (si);
2212 if (!is_gimple_debug (copy_stmt))
2213 update_stmt (copy_stmt);
2214
2215 /* Do this before the possible split_block. */
2216 gsi_next (&si);
2217
2218 /* If this tree could throw an exception, there are two
2219 cases where we need to add abnormal edge(s): the
2220 tree wasn't in a region and there is a "current
2221 region" in the caller; or the original tree had
2222 EH edges. In both cases split the block after the tree,
2223 and add abnormal edge(s) as needed; we need both
2224 those from the callee and the caller.
2225 We check whether the copy can throw, because the const
2226 propagation can change an INDIRECT_REF which throws
2227 into a COMPONENT_REF which doesn't. If the copy
2228 can throw, the original could also throw. */
2229 can_throw = stmt_can_throw_internal (copy_stmt);
2230 nonlocal_goto
2231 = (stmt_can_make_abnormal_goto (copy_stmt)
2232 && !computed_goto_p (copy_stmt));
2233
2234 if (can_throw || nonlocal_goto)
2235 {
2236 if (!gsi_end_p (si))
2237 {
2238 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2239 gsi_next (&si);
2240 if (gsi_end_p (si))
2241 need_debug_cleanup = true;
2242 }
2243 if (!gsi_end_p (si))
2244 /* Note that bb's predecessor edges aren't necessarily
2245 right at this point; split_block doesn't care. */
2246 {
2247 edge e = split_block (new_bb, copy_stmt);
2248
2249 new_bb = e->dest;
2250 new_bb->aux = e->src->aux;
2251 si = gsi_start_bb (new_bb);
2252 }
2253 }
2254
2255 bool update_probs = false;
2256
2257 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2258 {
2259 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2260 update_probs = true;
2261 }
2262 else if (can_throw)
2263 {
2264 make_eh_edges (copy_stmt);
2265 update_probs = true;
2266 }
2267
2268 /* EH edges may not match old edges. Copy as much as possible. */
2269 if (update_probs)
2270 {
2271 edge e;
2272 edge_iterator ei;
2273 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2274
2275 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2276 if ((old_edge->flags & EDGE_EH)
2277 && (e = find_edge (copy_stmt_bb,
2278 (basic_block) old_edge->dest->aux))
2279 && (e->flags & EDGE_EH))
2280 e->probability = old_edge->probability;
2281
2282 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2283 if ((e->flags & EDGE_EH) && !e->probability.initialized_p ())
2284 e->probability = profile_probability::never ();
2285 }
2286
2287
2288 /* If the call we inline cannot make an abnormal goto, do not add
2289 additional abnormal edges but only retain those already present
2290 in the original function body. */
2291 if (abnormal_goto_dest == NULL)
2292 nonlocal_goto = false;
2293 if (nonlocal_goto)
2294 {
2295 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2296
2297 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2298 nonlocal_goto = false;
2299 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2300 in OpenMP regions which aren't allowed to be left abnormally.
2301 So, no need to add abnormal edge in that case. */
2302 else if (is_gimple_call (copy_stmt)
2303 && gimple_call_internal_p (copy_stmt)
2304 && (gimple_call_internal_fn (copy_stmt)
2305 == IFN_ABNORMAL_DISPATCHER)
2306 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2307 nonlocal_goto = false;
2308 else
2309 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2310 EDGE_ABNORMAL);
2311 }
2312
2313 if ((can_throw || nonlocal_goto)
2314 && gimple_in_ssa_p (cfun))
2315 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2316 can_throw, nonlocal_goto);
2317 }
2318 return need_debug_cleanup;
2319 }
2320
2321 /* Copy the PHIs. All blocks and edges have been copied, some blocks
2322 were possibly split and new outgoing EH edges inserted.
2323 BB points to the block of the original function and the AUX pointers
2324 link the original and newly copied blocks. */
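/* Rough illustration (names invented): if the original block has
   res_1 = PHI <a_2 (E1), b_3 (E2)>
   the copied block gets a fresh PHI whose arguments are the remapped
   a_2 and b_3, each taken from the original edge corresponding to the
   copied incoming edge; arguments that become non-gimple after
   remapping are gimplified onto the edge. */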
2325
2326 static void
2327 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2328 {
2329 basic_block const new_bb = (basic_block) bb->aux;
2330 edge_iterator ei;
2331 gphi *phi;
2332 gphi_iterator si;
2333 edge new_edge;
2334 bool inserted = false;
2335
2336 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2337 {
2338 tree res, new_res;
2339 gphi *new_phi;
2340
2341 phi = si.phi ();
2342 res = PHI_RESULT (phi);
2343 new_res = res;
2344 if (!virtual_operand_p (res))
2345 {
2346 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2347 if (EDGE_COUNT (new_bb->preds) == 0)
2348 {
2349 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2350 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2351 }
2352 else
2353 {
2354 new_phi = create_phi_node (new_res, new_bb);
2355 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2356 {
2357 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2358 bb);
2359 tree arg;
2360 tree new_arg;
2361 edge_iterator ei2;
2362 location_t locus;
2363
2364 /* When doing partial cloning, we allow PHIs on the entry
2365 block as long as all the arguments are the same.
2366 Use any input edge to find the argument to copy. */
2367 if (!old_edge)
2368 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2369 if (!old_edge->src->aux)
2370 break;
2371
2372 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2373 new_arg = arg;
2374 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2375 gcc_assert (new_arg);
2376 /* With return slot optimization we can end up with
2377 non-gimple (foo *)&this->m, fix that here. */
2378 if (TREE_CODE (new_arg) != SSA_NAME
2379 && TREE_CODE (new_arg) != FUNCTION_DECL
2380 && !is_gimple_val (new_arg))
2381 {
2382 gimple_seq stmts = NULL;
2383 new_arg = force_gimple_operand (new_arg, &stmts, true,
2384 NULL);
2385 gsi_insert_seq_on_edge (new_edge, stmts);
2386 inserted = true;
2387 }
2388 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2389 if (id->reset_location)
2390 locus = input_location;
2391 else
2392 locus = remap_location (locus, id);
2393 add_phi_arg (new_phi, new_arg, new_edge, locus);
2394 }
2395 }
2396 }
2397 }
2398
2399 /* Commit the delayed edge insertions. */
2400 if (inserted)
2401 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2402 gsi_commit_one_edge_insert (new_edge, NULL);
2403 }
2404
2405
2406 /* Wrapper for remap_decl so it can be used as a callback. */
2407
2408 static tree
2409 remap_decl_1 (tree decl, void *data)
2410 {
2411 return remap_decl (decl, (copy_body_data *) data);
2412 }
2413
2414 /* Build the struct function and associated data structures for the new
2415 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2416 changes cfun to the function of new_fndecl (and current_function_decl too). */
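/* For intuition (numbers invented): if the source function's entry
   block count is 1000 and COUNT is 250, the entry and exit counts
   computed below are scaled to one quarter of the source's; the other
   fields are either rebuilt empty or carried over from the source cfun. */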
2417
2418 static void
2419 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2420 {
2421 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2422
2423 if (!DECL_ARGUMENTS (new_fndecl))
2424 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2425 if (!DECL_RESULT (new_fndecl))
2426 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2427
2428 /* Register specific tree functions. */
2429 gimple_register_cfg_hooks ();
2430
2431 /* Get clean struct function. */
2432 push_struct_function (new_fndecl);
2433
2434 /* We will rebuild these, so just sanity check that they are empty. */
2435 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2436 gcc_assert (cfun->local_decls == NULL);
2437 gcc_assert (cfun->cfg == NULL);
2438 gcc_assert (cfun->decl == new_fndecl);
2439
2440 /* Copy items we preserve during cloning. */
2441 cfun->static_chain_decl = src_cfun->static_chain_decl;
2442 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2443 cfun->function_end_locus = src_cfun->function_end_locus;
2444 cfun->curr_properties = src_cfun->curr_properties;
2445 cfun->last_verified = src_cfun->last_verified;
2446 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2447 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2448 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2449 cfun->stdarg = src_cfun->stdarg;
2450 cfun->after_inlining = src_cfun->after_inlining;
2451 cfun->can_throw_non_call_exceptions
2452 = src_cfun->can_throw_non_call_exceptions;
2453 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2454 cfun->returns_struct = src_cfun->returns_struct;
2455 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2456
2457 init_empty_tree_cfg ();
2458
2459 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2460
2461 profile_count num = count;
2462 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2463 profile_count::adjust_for_ipa_scaling (&num, &den);
2464
2465 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2466 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2467 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2468 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2469 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2470 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2471 if (src_cfun->eh)
2472 init_eh_for_function ();
2473
2474 if (src_cfun->gimple_df)
2475 {
2476 init_tree_ssa (cfun);
2477 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2478 if (cfun->gimple_df->in_ssa_p)
2479 init_ssa_operands (cfun);
2480 }
2481 }
2482
2483 /* Helper function for copy_cfg_body. Move debug stmts from the end
2484 of NEW_BB to the beginning of successor basic blocks when needed. If the
2485 successor has multiple predecessors, reset them; otherwise keep
2486 their value. */
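/* Sketch (hypothetical GIMPLE): if NEW_BB ends with a possibly-throwing
   call followed by a bind such as
   # DEBUG x => x_1
   the bind cannot stay behind the call once the block's EH and abnormal
   successors are wired up, so it is moved (on the last edge) or copied
   to the start of each successor; when a successor has several
   predecessors, the bound value is dropped because it need not hold on
   every incoming path. */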
2487
2488 static void
2489 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2490 {
2491 edge e;
2492 edge_iterator ei;
2493 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2494
2495 if (gsi_end_p (si)
2496 || gsi_one_before_end_p (si)
2497 || !(stmt_can_throw_internal (gsi_stmt (si))
2498 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2499 return;
2500
2501 FOR_EACH_EDGE (e, ei, new_bb->succs)
2502 {
2503 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2504 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2505 while (is_gimple_debug (gsi_stmt (ssi)))
2506 {
2507 gimple *stmt = gsi_stmt (ssi);
2508 gdebug *new_stmt;
2509 tree var;
2510 tree value;
2511
2512 /* For the last edge move the debug stmts instead of copying
2513 them. */
2514 if (ei_one_before_end_p (ei))
2515 {
2516 si = ssi;
2517 gsi_prev (&ssi);
2518 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2519 {
2520 gimple_debug_bind_reset_value (stmt);
2521 gimple_set_location (stmt, UNKNOWN_LOCATION);
2522 }
2523 gsi_remove (&si, false);
2524 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2525 continue;
2526 }
2527
2528 if (gimple_debug_bind_p (stmt))
2529 {
2530 var = gimple_debug_bind_get_var (stmt);
2531 if (single_pred_p (e->dest))
2532 {
2533 value = gimple_debug_bind_get_value (stmt);
2534 value = unshare_expr (value);
2535 new_stmt = gimple_build_debug_bind (var, value, stmt);
2536 }
2537 else
2538 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2539 }
2540 else if (gimple_debug_source_bind_p (stmt))
2541 {
2542 var = gimple_debug_source_bind_get_var (stmt);
2543 value = gimple_debug_source_bind_get_value (stmt);
2544 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2545 }
2546 else if (gimple_debug_nonbind_marker_p (stmt))
2547 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2548 else
2549 gcc_unreachable ();
2550 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2551 id->debug_stmts.safe_push (new_stmt);
2552 gsi_prev (&ssi);
2553 }
2554 }
2555 }
2556
2557 /* Make a copy of the sub-loops of SRC_PARENT and place them
2558 as sub-loops of DEST_PARENT. */
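/* The recursion below mirrors the loop tree: for a copied source nest
   loop A { loop B { } }
   a new loop A' is allocated under DEST_PARENT, then the function
   recurses so that B' ends up under A', with headers and latches taken
   from the already-copied blocks via their aux pointers. */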
2559
2560 static void
2561 copy_loops (copy_body_data *id,
2562 struct loop *dest_parent, struct loop *src_parent)
2563 {
2564 struct loop *src_loop = src_parent->inner;
2565 while (src_loop)
2566 {
2567 if (!id->blocks_to_copy
2568 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2569 {
2570 struct loop *dest_loop = alloc_loop ();
2571
2572 /* Assign the new loop its header and latch and associate
2573 those with the new loop. */
2574 dest_loop->header = (basic_block)src_loop->header->aux;
2575 dest_loop->header->loop_father = dest_loop;
2576 if (src_loop->latch != NULL)
2577 {
2578 dest_loop->latch = (basic_block)src_loop->latch->aux;
2579 dest_loop->latch->loop_father = dest_loop;
2580 }
2581
2582 /* Copy loop meta-data. */
2583 copy_loop_info (src_loop, dest_loop);
2584
2585 /* Finally place it into the loop array and the loop tree. */
2586 place_new_loop (cfun, dest_loop);
2587 flow_loop_tree_node_add (dest_parent, dest_loop);
2588
2589 dest_loop->safelen = src_loop->safelen;
2590 if (src_loop->unroll)
2591 {
2592 dest_loop->unroll = src_loop->unroll;
2593 cfun->has_unroll = true;
2594 }
2595 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2596 if (src_loop->force_vectorize)
2597 {
2598 dest_loop->force_vectorize = true;
2599 cfun->has_force_vectorize_loops = true;
2600 }
2601 if (src_loop->simduid)
2602 {
2603 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2604 cfun->has_simduid_loops = true;
2605 }
2606
2607 /* Recurse. */
2608 copy_loops (id, dest_loop, src_loop);
2609 }
2610 src_loop = src_loop->next;
2611 }
2612 }
2613
2614 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB. */
2615
2616 void
2617 redirect_all_calls (copy_body_data * id, basic_block bb)
2618 {
2619 gimple_stmt_iterator si;
2620 gimple *last = last_stmt (bb);
2621 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2622 {
2623 gimple *stmt = gsi_stmt (si);
2624 if (is_gimple_call (stmt))
2625 {
2626 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2627 if (edge)
2628 {
2629 edge->redirect_call_stmt_to_callee ();
2630 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2631 gimple_purge_dead_eh_edges (bb);
2632 }
2633 }
2634 }
2635 }
2636
2637 /* Make a copy of the body of FN so that it can be inserted inline in
2638 another function. Walks FN via CFG, returns new fndecl. */
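/* In rough order, the copy below proceeds as follows: duplicate the EH
   regions, copy the requested basic blocks, copy their edges (adding EH
   and abnormal edges as needed), duplicate the loop tree, copy the PHI
   nodes, and finally redirect call edges and clear the aux pointers
   that held the old-to-new block mapping. */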
2639
2640 static tree
2641 copy_cfg_body (copy_body_data * id,
2642 basic_block entry_block_map, basic_block exit_block_map,
2643 basic_block new_entry)
2644 {
2645 tree callee_fndecl = id->src_fn;
2646 /* Original cfun for the callee, doesn't change. */
2647 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2648 struct function *cfun_to_copy;
2649 basic_block bb;
2650 tree new_fndecl = NULL;
2651 bool need_debug_cleanup = false;
2652 int last;
2653 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2654 profile_count num = entry_block_map->count;
2655
2656 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2657
2658 /* Register specific tree functions. */
2659 gimple_register_cfg_hooks ();
2660
2661 /* If we are inlining just a region of the function, make sure to connect
2662 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2663 be part of a loop, we must compute the frequency and probability of
2664 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2665 probabilities of the edges incoming from the nonduplicated region. */
2666 if (new_entry)
2667 {
2668 edge e;
2669 edge_iterator ei;
2670 den = profile_count::zero ();
2671
2672 FOR_EACH_EDGE (e, ei, new_entry->preds)
2673 if (!e->src->aux)
2674 den += e->count ();
2675 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2676 }
2677
2678 profile_count::adjust_for_ipa_scaling (&num, &den);
2679
2680 /* Must have a CFG here at this point. */
2681 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2682 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2683
2684
2685 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2686 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2687 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2688 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2689
2690 /* Duplicate any exception-handling regions. */
2691 if (cfun->eh)
2692 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2693 remap_decl_1, id);
2694
2695 /* Use aux pointers to map the original blocks to their copies. */
2696 FOR_EACH_BB_FN (bb, cfun_to_copy)
2697 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2698 {
2699 basic_block new_bb = copy_bb (id, bb, num, den);
2700 bb->aux = new_bb;
2701 new_bb->aux = bb;
2702 new_bb->loop_father = entry_block_map->loop_father;
2703 }
2704
2705 last = last_basic_block_for_fn (cfun);
2706
2707 /* Now that we've duplicated the blocks, duplicate their edges. */
2708 basic_block abnormal_goto_dest = NULL;
2709 if (id->call_stmt
2710 && stmt_can_make_abnormal_goto (id->call_stmt))
2711 {
2712 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2713
2714 bb = gimple_bb (id->call_stmt);
2715 gsi_next (&gsi);
2716 if (gsi_end_p (gsi))
2717 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2718 }
2719 FOR_ALL_BB_FN (bb, cfun_to_copy)
2720 if (!id->blocks_to_copy
2721 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2722 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2723 abnormal_goto_dest, id);
2724
2725 if (new_entry)
2726 {
2727 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2728 EDGE_FALLTHRU);
2729 e->probability = profile_probability::always ();
2730 }
2731
2732 /* Duplicate the loop tree, if available and wanted. */
2733 if (loops_for_fn (src_cfun) != NULL
2734 && current_loops != NULL)
2735 {
2736 copy_loops (id, entry_block_map->loop_father,
2737 get_loop (src_cfun, 0));
2738 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2739 loops_state_set (LOOPS_NEED_FIXUP);
2740 }
2741
2742 /* If the loop tree in the source function needed fixup, mark the
2743 destination loop tree for fixup, too. */
2744 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2745 loops_state_set (LOOPS_NEED_FIXUP);
2746
2747 if (gimple_in_ssa_p (cfun))
2748 FOR_ALL_BB_FN (bb, cfun_to_copy)
2749 if (!id->blocks_to_copy
2750 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2751 copy_phis_for_bb (bb, id);
2752
2753 FOR_ALL_BB_FN (bb, cfun_to_copy)
2754 if (bb->aux)
2755 {
2756 if (need_debug_cleanup
2757 && bb->index != ENTRY_BLOCK
2758 && bb->index != EXIT_BLOCK)
2759 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2760 /* Update call edge destinations. This can not be done before loop
2761 info is updated, because we may split basic blocks. */
2762 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2763 && bb->index != ENTRY_BLOCK
2764 && bb->index != EXIT_BLOCK)
2765 redirect_all_calls (id, (basic_block)bb->aux);
2766 ((basic_block)bb->aux)->aux = NULL;
2767 bb->aux = NULL;
2768 }
2769
2770 /* Zero out AUX fields of newly created blocks during EH edge
2771 insertion. */
2772 for (; last < last_basic_block_for_fn (cfun); last++)
2773 {
2774 if (need_debug_cleanup)
2775 maybe_move_debug_stmts_to_successors (id,
2776 BASIC_BLOCK_FOR_FN (cfun, last));
2777 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2778 /* Update call edge destinations. This can not be done before loop
2779 info is updated, because we may split basic blocks. */
2780 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2781 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2782 }
2783 entry_block_map->aux = NULL;
2784 exit_block_map->aux = NULL;
2785
2786 if (id->eh_map)
2787 {
2788 delete id->eh_map;
2789 id->eh_map = NULL;
2790 }
2791 if (id->dependence_map)
2792 {
2793 delete id->dependence_map;
2794 id->dependence_map = NULL;
2795 }
2796
2797 return new_fndecl;
2798 }
2799
2800 /* Copy the debug STMT using ID. We deal with these statements in a
2801 special way: if any variable in their VALUE expression wasn't
2802 remapped yet, we won't remap it, because that would get decl uids
2803 out of sync, causing codegen differences between -g and -g0. If
2804 this arises, we drop the VALUE expression altogether. */
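/* Hypothetical example: a bind such as
   # DEBUG d => a_5 + 1
   whose value refers to a decl that was never remapped is turned into
   the valueless bind
   # DEBUG d => NULL
   instead of referencing the callee's original decl. */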
2805
2806 static void
2807 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2808 {
2809 tree t, *n;
2810 struct walk_stmt_info wi;
2811
2812 if (gimple_block (stmt))
2813 {
2814 n = id->decl_map->get (gimple_block (stmt));
2815 gimple_set_block (stmt, n ? *n : id->block);
2816 }
2817
2818 if (gimple_debug_nonbind_marker_p (stmt))
2819 return;
2820
2821 /* Remap all the operands in COPY. */
2822 memset (&wi, 0, sizeof (wi));
2823 wi.info = id;
2824
2825 processing_debug_stmt = 1;
2826
2827 if (gimple_debug_source_bind_p (stmt))
2828 t = gimple_debug_source_bind_get_var (stmt);
2829 else if (gimple_debug_bind_p (stmt))
2830 t = gimple_debug_bind_get_var (stmt);
2831 else
2832 gcc_unreachable ();
2833
2834 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2835 && (n = id->debug_map->get (t)))
2836 {
2837 gcc_assert (VAR_P (*n));
2838 t = *n;
2839 }
2840 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
2841 /* T is a non-localized variable. */;
2842 else
2843 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2844
2845 if (gimple_debug_bind_p (stmt))
2846 {
2847 gimple_debug_bind_set_var (stmt, t);
2848
2849 if (gimple_debug_bind_has_value_p (stmt))
2850 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2851 remap_gimple_op_r, &wi, NULL);
2852
2853 /* Punt if any decl couldn't be remapped. */
2854 if (processing_debug_stmt < 0)
2855 gimple_debug_bind_reset_value (stmt);
2856 }
2857 else if (gimple_debug_source_bind_p (stmt))
2858 {
2859 gimple_debug_source_bind_set_var (stmt, t);
2860 /* When inlining, if the source bind refers to one of the optimized
2861 away parameters, change the source bind into a normal debug bind
2862 referring to the corresponding DEBUG_EXPR_DECL that should have
2863 been bound before the call stmt. */
2864 t = gimple_debug_source_bind_get_value (stmt);
2865 if (t != NULL_TREE
2866 && TREE_CODE (t) == PARM_DECL
2867 && id->call_stmt)
2868 {
2869 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2870 unsigned int i;
2871 if (debug_args != NULL)
2872 {
2873 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2874 if ((**debug_args)[i] == DECL_ORIGIN (t)
2875 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2876 {
2877 t = (**debug_args)[i + 1];
2878 stmt->subcode = GIMPLE_DEBUG_BIND;
2879 gimple_debug_bind_set_value (stmt, t);
2880 break;
2881 }
2882 }
2883 }
2884 if (gimple_debug_source_bind_p (stmt))
2885 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2886 remap_gimple_op_r, &wi, NULL);
2887 }
2888
2889 processing_debug_stmt = 0;
2890
2891 update_stmt (stmt);
2892 }
2893
2894 /* Process deferred debug stmts. In order to give values better odds
2895 of being successfully remapped, we delay the processing of debug
2896 stmts until all other stmts that might require remapping are
2897 processed. */
2898
2899 static void
2900 copy_debug_stmts (copy_body_data *id)
2901 {
2902 size_t i;
2903 gdebug *stmt;
2904
2905 if (!id->debug_stmts.exists ())
2906 return;
2907
2908 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2909 copy_debug_stmt (stmt, id);
2910
2911 id->debug_stmts.release ();
2912 }
2913
2914 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2915 another function. */
2916
2917 static tree
2918 copy_tree_body (copy_body_data *id)
2919 {
2920 tree fndecl = id->src_fn;
2921 tree body = DECL_SAVED_TREE (fndecl);
2922
2923 walk_tree (&body, copy_tree_body_r, id, NULL);
2924
2925 return body;
2926 }
2927
2928 /* Make a copy of the body of FN so that it can be inserted inline in
2929 another function. */
2930
2931 static tree
2932 copy_body (copy_body_data *id,
2933 basic_block entry_block_map, basic_block exit_block_map,
2934 basic_block new_entry)
2935 {
2936 tree fndecl = id->src_fn;
2937 tree body;
2938
2939 /* If this body has a CFG, walk CFG and copy. */
2940 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2941 body = copy_cfg_body (id, entry_block_map, exit_block_map,
2942 new_entry);
2943 copy_debug_stmts (id);
2944
2945 return body;
2946 }
2947
2948 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2949 defined in function FN, or of a data member thereof. */
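/* For instance (illustrative): when inlining a recursive call to FN, an
   argument such as &local, where local is an automatic variable of FN
   itself, must not be propagated as if it were a constant, because the
   address refers to the callee's own frame; this predicate detects that
   case for setup_one_parameter. */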
2950
2951 static bool
2952 self_inlining_addr_expr (tree value, tree fn)
2953 {
2954 tree var;
2955
2956 if (TREE_CODE (value) != ADDR_EXPR)
2957 return false;
2958
2959 var = get_base_address (TREE_OPERAND (value, 0));
2960
2961 return var && auto_var_in_fn_p (var, fn);
2962 }
2963
2964 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2965 lexical block and line number information from base_stmt, if given,
2966 or from the last stmt of the block otherwise. */
2967
2968 static gimple *
2969 insert_init_debug_bind (copy_body_data *id,
2970 basic_block bb, tree var, tree value,
2971 gimple *base_stmt)
2972 {
2973 gimple *note;
2974 gimple_stmt_iterator gsi;
2975 tree tracked_var;
2976
2977 if (!gimple_in_ssa_p (id->src_cfun))
2978 return NULL;
2979
2980 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
2981 return NULL;
2982
2983 tracked_var = target_for_debug_bind (var);
2984 if (!tracked_var)
2985 return NULL;
2986
2987 if (bb)
2988 {
2989 gsi = gsi_last_bb (bb);
2990 if (!base_stmt && !gsi_end_p (gsi))
2991 base_stmt = gsi_stmt (gsi);
2992 }
2993
2994 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
2995
2996 if (bb)
2997 {
2998 if (!gsi_end_p (gsi))
2999 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3000 else
3001 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3002 }
3003
3004 return note;
3005 }
3006
3007 static void
3008 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3009 {
3010 /* If VAR represents a zero-sized variable, it's possible that the
3011 assignment statement may result in no gimple statements. */
3012 if (init_stmt)
3013 {
3014 gimple_stmt_iterator si = gsi_last_bb (bb);
3015
3016 /* We can end up with init statements that store to a non-register
3017 from a rhs with a conversion. Handle that here by forcing the
3018 rhs into a temporary. gimple_regimplify_operands is not
3019 prepared to do this for us. */
3020 if (!is_gimple_debug (init_stmt)
3021 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3022 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3023 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3024 {
3025 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3026 gimple_expr_type (init_stmt),
3027 gimple_assign_rhs1 (init_stmt));
3028 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3029 GSI_NEW_STMT);
3030 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3031 gimple_assign_set_rhs1 (init_stmt, rhs);
3032 }
3033 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3034 gimple_regimplify_operands (init_stmt, &si);
3035
3036 if (!is_gimple_debug (init_stmt))
3037 {
3038 tree def = gimple_assign_lhs (init_stmt);
3039 insert_init_debug_bind (id, bb, def, def, init_stmt);
3040 }
3041 }
3042 }
3043
3044 /* Initialize parameter P with VALUE. If needed, produce an init statement
3045 at the end of BB. When BB is NULL, we return the init statement to be
3046 output later. */
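/* Sketch of the common cases (hypothetical callee int sq (int x)):
   when inlining the call sq (5), the PARM_DECL x is mapped to a new
   local VAR_DECL; if we are optimizing and x is a read-only gimple
   register, its SSA default definition is simply mapped to the
   constant 5, and otherwise an assignment to the new variable is
   emitted at the end of BB. */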
3047 static gimple *
3048 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3049 basic_block bb, tree *vars)
3050 {
3051 gimple *init_stmt = NULL;
3052 tree var;
3053 tree rhs = value;
3054 tree def = (gimple_in_ssa_p (cfun)
3055 ? ssa_default_def (id->src_cfun, p) : NULL);
3056
3057 if (value
3058 && value != error_mark_node
3059 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3060 {
3061 /* If we can match up types by promotion/demotion do so. */
3062 if (fold_convertible_p (TREE_TYPE (p), value))
3063 rhs = fold_convert (TREE_TYPE (p), value);
3064 else
3065 {
3066 /* ??? For valid programs we should not end up here.
3067 Still if we end up with truly mismatched types here, fall back
3068 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3069 GIMPLE to the following passes. */
3070 if (!is_gimple_reg_type (TREE_TYPE (value))
3071 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3072 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3073 else
3074 rhs = build_zero_cst (TREE_TYPE (p));
3075 }
3076 }
3077
3078 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3079 here since the type of this decl must be visible to the calling
3080 function. */
3081 var = copy_decl_to_var (p, id);
3082
3083 /* Declare this new variable. */
3084 DECL_CHAIN (var) = *vars;
3085 *vars = var;
3086
3087 /* Make gimplifier happy about this variable. */
3088 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3089
3090 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3091 we would not need to create a new variable here at all, if it
3092 weren't for debug info. Still, we can just use the argument
3093 value. */
3094 if (TREE_READONLY (p)
3095 && !TREE_ADDRESSABLE (p)
3096 && value && !TREE_SIDE_EFFECTS (value)
3097 && !def)
3098 {
3099 /* We may produce non-gimple trees by adding NOPs or introduce
3100 invalid sharing when the operand is not really constant.
3101 It is not a big deal to prohibit constant propagation here, as
3102 we will constant propagate in the DOM1 pass anyway. */
3103 if (is_gimple_min_invariant (value)
3104 && useless_type_conversion_p (TREE_TYPE (p),
3105 TREE_TYPE (value))
3106 /* We have to be very careful about ADDR_EXPR. Make sure
3107 the base variable isn't a local variable of the inlined
3108 function, e.g., when doing recursive inlining, direct or
3109 mutually-recursive or whatever, which is why we don't
3110 just test whether fn == current_function_decl. */
3111 && ! self_inlining_addr_expr (value, fn))
3112 {
3113 insert_decl_map (id, p, value);
3114 insert_debug_decl_map (id, p, var);
3115 return insert_init_debug_bind (id, bb, var, value, NULL);
3116 }
3117 }
3118
3119 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3120 that way, when the PARM_DECL is encountered, it will be
3121 automatically replaced by the VAR_DECL. */
3122 insert_decl_map (id, p, var);
3123
3124 /* Even if P was TREE_READONLY, the new VAR should not be.
3125 In the original code, we would have constructed a
3126 temporary, and then the function body would have never
3127 changed the value of P. However, now, we will be
3128 constructing VAR directly. The constructor body may
3129 change its value multiple times as it is being
3130 constructed. Therefore, it must not be TREE_READONLY;
3131 the back-end assumes that a TREE_READONLY variable is
3132 assigned to only once. */
3133 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3134 TREE_READONLY (var) = 0;
3135
3136 /* If there is no setup required and we are in SSA, take the easy route
3137 replacing all SSA names representing the function parameter by the
3138 SSA name passed to the function.
3139 
3140 We need to construct a map for the variable anyway, as it might be used
3141 in different SSA names when the parameter is set in the function.
3142 
3143 Do the replacement at -O0 for const arguments replaced by a constant.
3144 This is important for builtin_constant_p and other constructs requiring
3145 a constant argument to be visible in the inlined function body. */
3146 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3147 && (optimize
3148 || (TREE_READONLY (p)
3149 && is_gimple_min_invariant (rhs)))
3150 && (TREE_CODE (rhs) == SSA_NAME
3151 || is_gimple_min_invariant (rhs))
3152 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3153 {
3154 insert_decl_map (id, def, rhs);
3155 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3156 }
3157
3158 /* If the value of the argument is never used, don't bother initializing
3159 it. */
3160 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3161 {
3162 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3163 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3164 }
3165
3166 /* Initialize this VAR_DECL from the equivalent argument. Convert
3167 the argument to the proper type in case it was promoted. */
3168 if (value)
3169 {
3170 if (rhs == error_mark_node)
3171 {
3172 insert_decl_map (id, p, var);
3173 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3174 }
3175
3176 STRIP_USELESS_TYPE_CONVERSION (rhs);
3177
3178 /* If we are in SSA form properly remap the default definition
3179 or assign to a dummy SSA name if the parameter is unused and
3180 we are not optimizing. */
3181 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3182 {
3183 if (def)
3184 {
3185 def = remap_ssa_name (def, id);
3186 init_stmt = gimple_build_assign (def, rhs);
3187 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3188 set_ssa_default_def (cfun, var, NULL);
3189 }
3190 else if (!optimize)
3191 {
3192 def = make_ssa_name (var);
3193 init_stmt = gimple_build_assign (def, rhs);
3194 }
3195 }
3196 else
3197 init_stmt = gimple_build_assign (var, rhs);
3198
3199 if (bb && init_stmt)
3200 insert_init_stmt (id, bb, init_stmt);
3201 }
3202 return init_stmt;
3203 }
3204
3205 /* Generate code to initialize the parameters of the function at the
3206 top of the stack in ID from the GIMPLE_CALL STMT. */
3207
3208 static void
3209 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3210 tree fn, basic_block bb)
3211 {
3212 tree parms;
3213 size_t i;
3214 tree p;
3215 tree vars = NULL_TREE;
3216 tree static_chain = gimple_call_chain (stmt);
3217
3218 /* Figure out what the parameters are. */
3219 parms = DECL_ARGUMENTS (fn);
3220
3221 /* Loop through the parameter declarations, replacing each with an
3222 equivalent VAR_DECL, appropriately initialized. */
3223 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3224 {
3225 tree val;
3226 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3227 setup_one_parameter (id, p, val, fn, bb, &vars);
3228 }
3229 /* After remapping the parameters, remap their types. This has to be done
3230 in a second loop over all parameters to appropriately remap
3231 variable sized arrays when the size is specified in a
3232 parameter following the array. */
3233 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3234 {
3235 tree *varp = id->decl_map->get (p);
3236 if (varp && VAR_P (*varp))
3237 {
3238 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3239 ? ssa_default_def (id->src_cfun, p) : NULL);
3240 tree var = *varp;
3241 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3242 /* Also remap the default definition if it was remapped
3243 to the default definition of the parameter replacement
3244 by the parameter setup. */
3245 if (def)
3246 {
3247 tree *defp = id->decl_map->get (def);
3248 if (defp
3249 && TREE_CODE (*defp) == SSA_NAME
3250 && SSA_NAME_VAR (*defp) == var)
3251 TREE_TYPE (*defp) = TREE_TYPE (var);
3252 }
3253 }
3254 }
3255
3256 /* Initialize the static chain. */
3257 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3258 gcc_assert (fn != current_function_decl);
3259 if (p)
3260 {
3261 /* No static chain? Seems like a bug in tree-nested.c. */
3262 gcc_assert (static_chain);
3263
3264 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3265 }
3266
3267 declare_inline_vars (id->block, vars);
3268 }
3269
3270
3271 /* Declare a return variable to replace the RESULT_DECL for the
3272 function we are calling. An appropriate DECL_STMT is returned.
3273 The USE_STMT is filled to contain a use of the declaration to
3274 indicate the return value of the function.
3275
3276 RETURN_SLOT, if non-null, is the place where to store the result. It
3277 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3278 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3279
3280 The return value is a (possibly null) value that holds the result
3281 as seen by the caller. */
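/* Illustrative cases (caller-side names invented): for a call
   d = callee ();
   MODIFY_DEST is d, and when the types match and no addressability
   issue arises, d itself stands in for the callee's RESULT_DECL. With
   the return-slot optimization the RESULT_DECL maps to the slot (or to
   its dereferenced address when returning by reference). Otherwise a
   fresh local temporary is declared and returned as the use. */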
3282
3283 static tree
3284 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3285 basic_block entry_bb)
3286 {
3287 tree callee = id->src_fn;
3288 tree result = DECL_RESULT (callee);
3289 tree callee_type = TREE_TYPE (result);
3290 tree caller_type;
3291 tree var, use;
3292
3293 /* Handle type-mismatches in the function declaration return type
3294 vs. the call expression. */
3295 if (modify_dest)
3296 caller_type = TREE_TYPE (modify_dest);
3297 else
3298 caller_type = TREE_TYPE (TREE_TYPE (callee));
3299
3300 /* We don't need to do anything for functions that don't return anything. */
3301 if (VOID_TYPE_P (callee_type))
3302 return NULL_TREE;
3303
3304 /* If there was a return slot, then the return value is the
3305 dereferenced address of that object. */
3306 if (return_slot)
3307 {
3308 /* The front end shouldn't have used both return_slot and
3309 a modify expression. */
3310 gcc_assert (!modify_dest);
3311 if (DECL_BY_REFERENCE (result))
3312 {
3313 tree return_slot_addr = build_fold_addr_expr (return_slot);
3314 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3315
3316 /* We are going to construct *&return_slot and we can't do that
3317 for variables believed to be not addressable.
3318
3319 FIXME: This check possibly can match, because values returned
3320 via return slot optimization are not believed to have address
3321 taken by alias analysis. */
3322 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3323 var = return_slot_addr;
3324 }
3325 else
3326 {
3327 var = return_slot;
3328 gcc_assert (TREE_CODE (var) != SSA_NAME);
3329 if (TREE_ADDRESSABLE (result))
3330 mark_addressable (var);
3331 }
3332 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3333 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3334 && !DECL_GIMPLE_REG_P (result)
3335 && DECL_P (var))
3336 DECL_GIMPLE_REG_P (var) = 0;
3337 use = NULL;
3338 goto done;
3339 }
3340
3341 /* All types requiring non-trivial constructors should have been handled. */
3342 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3343
3344 /* Attempt to avoid creating a new temporary variable. */
3345 if (modify_dest
3346 && TREE_CODE (modify_dest) != SSA_NAME)
3347 {
3348 bool use_it = false;
3349
3350 /* We can't use MODIFY_DEST if there's type promotion involved. */
3351 if (!useless_type_conversion_p (callee_type, caller_type))
3352 use_it = false;
3353
3354 /* ??? If we're assigning to a variable sized type, then we must
3355 reuse the destination variable, because we've no good way to
3356 create variable sized temporaries at this point. */
3357 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3358 use_it = true;
3359
3360 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3361 reuse it as the result of the call directly. Don't do this if
3362 it would promote MODIFY_DEST to addressable. */
3363 else if (TREE_ADDRESSABLE (result))
3364 use_it = false;
3365 else
3366 {
3367 tree base_m = get_base_address (modify_dest);
3368
3369 /* If the base isn't a decl, then it's a pointer, and we don't
3370 know where that's going to go. */
3371 if (!DECL_P (base_m))
3372 use_it = false;
3373 else if (is_global_var (base_m))
3374 use_it = false;
3375 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3376 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3377 && !DECL_GIMPLE_REG_P (result)
3378 && DECL_GIMPLE_REG_P (base_m))
3379 use_it = false;
3380 else if (!TREE_ADDRESSABLE (base_m))
3381 use_it = true;
3382 }
3383
3384 if (use_it)
3385 {
3386 var = modify_dest;
3387 use = NULL;
3388 goto done;
3389 }
3390 }
3391
3392 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3393
3394 var = copy_result_decl_to_var (result, id);
3395 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3396
3397 /* Do not have the rest of GCC warn about this variable as it should
3398 not be visible to the user. */
3399 TREE_NO_WARNING (var) = 1;
3400
3401 declare_inline_vars (id->block, var);
3402
3403 /* Build the use expr. If the return type of the function was
3404 promoted, convert it back to the expected type. */
3405 use = var;
3406 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3407 {
3408 /* If we can match up types by promotion/demotion do so. */
3409 if (fold_convertible_p (caller_type, var))
3410 use = fold_convert (caller_type, var);
3411 else
3412 {
3413 /* ??? For valid programs we should not end up here.
3414 Still if we end up with truly mismatched types here, fall back
3415 to using a MEM_REF to not leak invalid GIMPLE to the following
3416 passes. */
3417 /* Prevent var from being written into SSA form. */
3418 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3419 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3420 DECL_GIMPLE_REG_P (var) = false;
3421 else if (is_gimple_reg_type (TREE_TYPE (var)))
3422 TREE_ADDRESSABLE (var) = true;
3423 use = fold_build2 (MEM_REF, caller_type,
3424 build_fold_addr_expr (var),
3425 build_int_cst (ptr_type_node, 0));
3426 }
3427 }
3428
3429 STRIP_USELESS_TYPE_CONVERSION (use);
3430
3431 if (DECL_BY_REFERENCE (result))
3432 {
3433 TREE_ADDRESSABLE (var) = 1;
3434 var = build_fold_addr_expr (var);
3435 }
3436
3437 done:
3438 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3439 way, when the RESULT_DECL is encountered, it will be
3440 automatically replaced by the VAR_DECL.
3441
3442 When returning by reference, ensure that RESULT_DECL remaps to
3443 gimple_val. */
3444 if (DECL_BY_REFERENCE (result)
3445 && !is_gimple_val (var))
3446 {
3447 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3448 insert_decl_map (id, result, temp);
3449 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3450 its default_def SSA_NAME. */
3451 if (gimple_in_ssa_p (id->src_cfun)
3452 && is_gimple_reg (result))
3453 {
3454 temp = make_ssa_name (temp);
3455 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3456 }
3457 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3458 }
3459 else
3460 insert_decl_map (id, result, var);
3461
3462 /* Remember this so we can ignore it in remap_decls. */
3463 id->retvar = var;
3464 return use;
3465 }
3466
3467 /* Determine if the function can be copied. If so, return NULL. If
3468 not, return a string describing the reason for failure. */
3469
3470 const char *
3471 copy_forbidden (struct function *fun)
3472 {
3473 const char *reason = fun->cannot_be_copied_reason;
3474
3475 /* Only examine the function once. */
3476 if (fun->cannot_be_copied_set)
3477 return reason;
3478
3479 /* We cannot copy a function that receives a non-local goto
3480 because we cannot remap the destination label used in the
3481 function that is performing the non-local goto. */
3482 /* ??? Actually, this should be possible, if we work at it.
3483 No doubt there's just a handful of places that simply
3484 assume it doesn't happen and don't substitute properly. */
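/* As an illustration only (a hypothetical example, not code from this
   file), a GNU C nested function can jump to a local label declared in
   its containing function:

	void outer (void)
	{
	  __label__ bail;
	  void inner (void) { goto bail; }
	  inner ();
	bail:
	  return;
	}

   Here "outer" receives the non-local goto, so it is the function whose
   body copy_forbidden refuses to duplicate.  */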
3485 if (fun->has_nonlocal_label)
3486 {
3487 reason = G_("function %q+F can never be copied "
3488 "because it receives a non-local goto");
3489 goto fail;
3490 }
3491
3492 if (fun->has_forced_label_in_static)
3493 {
3494 reason = G_("function %q+F can never be copied because it saves "
3495 "address of local label in a static variable");
3496 goto fail;
3497 }
3498
3499 fail:
3500 fun->cannot_be_copied_reason = reason;
3501 fun->cannot_be_copied_set = true;
3502 return reason;
3503 }
3504
3505
3506 static const char *inline_forbidden_reason;
3507
3508 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3509 iff a function cannot be inlined. Also sets the reason why. */
3510
3511 static tree
3512 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3513 struct walk_stmt_info *wip)
3514 {
3515 tree fn = (tree) wip->info;
3516 tree t;
3517 gimple *stmt = gsi_stmt (*gsi);
3518
3519 switch (gimple_code (stmt))
3520 {
3521 case GIMPLE_CALL:
3522 /* Refuse to inline an alloca call unless the user explicitly forced
3523 it, as inlining may drastically increase the program's memory
3524 overhead when the function using alloca is called in a loop. In the
3525 GCC present in SPEC2000, inlining into schedule_block caused it to
3526 require 2GB of RAM instead of 256MB. Don't do so for alloca calls
3527 emitted for VLA objects as those can't cause unbounded growth
3528 (they're always wrapped inside stack_save/stack_restore regions). */
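/* For illustration only (hypothetical callee, not code from this file):

	void use_row (int *row, int n);

	static void
	fill_row (int n)
	{
	  int *row = __builtin_alloca (n * sizeof (int));
	  use_row (row, n);
	}

   If fill_row is inlined into a loop body, every iteration's alloca
   stays live until the *caller* returns, so stack usage grows with the
   trip count instead of staying bounded by one callee frame.  */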
3529 if (gimple_maybe_alloca_call_p (stmt)
3530 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3531 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3532 {
3533 inline_forbidden_reason
3534 = G_("function %q+F can never be inlined because it uses "
3535 "alloca (override using the always_inline attribute)");
3536 *handled_ops_p = true;
3537 return fn;
3538 }
3539
3540 t = gimple_call_fndecl (stmt);
3541 if (t == NULL_TREE)
3542 break;
3543
3544 /* We cannot inline functions that call setjmp. */
3545 if (setjmp_call_p (t))
3546 {
3547 inline_forbidden_reason
3548 = G_("function %q+F can never be inlined because it uses setjmp");
3549 *handled_ops_p = true;
3550 return t;
3551 }
3552
3553 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3554 switch (DECL_FUNCTION_CODE (t))
3555 {
3556 /* We cannot inline functions that take a variable number of
3557 arguments. */
3558 case BUILT_IN_VA_START:
3559 case BUILT_IN_NEXT_ARG:
3560 case BUILT_IN_VA_END:
3561 inline_forbidden_reason
3562 = G_("function %q+F can never be inlined because it "
3563 "uses variable argument lists");
3564 *handled_ops_p = true;
3565 return t;
3566
3567 case BUILT_IN_LONGJMP:
3568 /* We can't inline functions that call __builtin_longjmp at
3569 all. The non-local goto machinery really requires the
3570 destination be in a different function. If we allow the
3571 function calling __builtin_longjmp to be inlined into the
3572 function calling __builtin_setjmp, Things will Go Awry. */
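/* Illustrative scenario (hypothetical functions f and g): if f calls
   __builtin_setjmp (buf) and g calls __builtin_longjmp (buf, 1),
   inlining g into f would place the jump and its destination in the
   same function, which this machinery cannot express.  */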
3573 inline_forbidden_reason
3574 = G_("function %q+F can never be inlined because "
3575 "it uses setjmp-longjmp exception handling");
3576 *handled_ops_p = true;
3577 return t;
3578
3579 case BUILT_IN_NONLOCAL_GOTO:
3580 /* Similarly. */
3581 inline_forbidden_reason
3582 = G_("function %q+F can never be inlined because "
3583 "it uses non-local goto");
3584 *handled_ops_p = true;
3585 return t;
3586
3587 case BUILT_IN_RETURN:
3588 case BUILT_IN_APPLY_ARGS:
3589 /* If a __builtin_apply_args caller would be inlined,
3590 it would be saving arguments of the function it has
3591 been inlined into. Similarly __builtin_return would
3592 return from the function it has been inlined into. */
3593 inline_forbidden_reason
3594 = G_("function %q+F can never be inlined because "
3595 "it uses __builtin_return or __builtin_apply_args");
3596 *handled_ops_p = true;
3597 return t;
3598
3599 default:
3600 break;
3601 }
3602 break;
3603
3604 case GIMPLE_GOTO:
3605 t = gimple_goto_dest (stmt);
3606
3607 /* We will not inline a function which uses computed goto. The
3608 addresses of its local labels, which may be tucked into
3609 global storage, are of course not constant across
3610 instantiations, which causes unexpected behavior. */
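/* Illustrative example (not code from this file) of the construct
   rejected here, using the GNU C labels-as-values extension:

	void step (int i)
	{
	  static void *tbl[] = { &&do_a, &&do_b };
	  goto *tbl[i & 1];
	do_a:
	  return;
	do_b:
	  return;
	}

   The static table captures the label addresses of one instantiation;
   an inlined copy would have different label addresses, so the stored
   values would no longer be meaningful.  */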
3611 if (TREE_CODE (t) != LABEL_DECL)
3612 {
3613 inline_forbidden_reason
3614 = G_("function %q+F can never be inlined "
3615 "because it contains a computed goto");
3616 *handled_ops_p = true;
3617 return t;
3618 }
3619 break;
3620
3621 default:
3622 break;
3623 }
3624
3625 *handled_ops_p = false;
3626 return NULL_TREE;
3627 }
3628
3629 /* Return true if FNDECL is a function that cannot be inlined into
3630 another one. */
3631
3632 static bool
3633 inline_forbidden_p (tree fndecl)
3634 {
3635 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3636 struct walk_stmt_info wi;
3637 basic_block bb;
3638 bool forbidden_p = false;
3639
3640 /* First check for shared reasons not to copy the code. */
3641 inline_forbidden_reason = copy_forbidden (fun);
3642 if (inline_forbidden_reason != NULL)
3643 return true;
3644
3645 /* Next, walk the statements of the function looking for
3646 constructs we can't handle or that are non-optimal for inlining. */
3647 hash_set<tree> visited_nodes;
3648 memset (&wi, 0, sizeof (wi));
3649 wi.info = (void *) fndecl;
3650 wi.pset = &visited_nodes;
3651
3652 FOR_EACH_BB_FN (bb, fun)
3653 {
3654 gimple *ret;
3655 gimple_seq seq = bb_seq (bb);
3656 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3657 forbidden_p = (ret != NULL);
3658 if (forbidden_p)
3659 break;
3660 }
3661
3662 return forbidden_p;
3663 }
3664 \f
3665 /* Return false if the function FNDECL cannot be inlined on account of its
3666 attributes, true otherwise. */
3667 static bool
3668 function_attribute_inlinable_p (const_tree fndecl)
3669 {
3670 if (targetm.attribute_table)
3671 {
3672 const_tree a;
3673
3674 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3675 {
3676 const_tree name = TREE_PURPOSE (a);
3677 int i;
3678
3679 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3680 if (is_attribute_p (targetm.attribute_table[i].name, name))
3681 return targetm.function_attribute_inlinable_p (fndecl);
3682 }
3683 }
3684
3685 return true;
3686 }
3687
3688 /* Returns nonzero if FN is a function that does not have any
3689 fundamental inline blocking properties. */
3690
3691 bool
3692 tree_inlinable_function_p (tree fn)
3693 {
3694 bool inlinable = true;
3695 bool do_warning;
3696 tree always_inline;
3697
3698 /* If we've already decided this function shouldn't be inlined,
3699 there's no need to check again. */
3700 if (DECL_UNINLINABLE (fn))
3701 return false;
3702
3703 /* We only warn for functions declared `inline' by the user. */
3704 do_warning = (warn_inline
3705 && DECL_DECLARED_INLINE_P (fn)
3706 && !DECL_NO_INLINE_WARNING_P (fn)
3707 && !DECL_IN_SYSTEM_HEADER (fn));
3708
3709 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3710
3711 if (flag_no_inline
3712 && always_inline == NULL)
3713 {
3714 if (do_warning)
3715 warning (OPT_Winline, "function %q+F can never be inlined because it "
3716 "is suppressed using -fno-inline", fn);
3717 inlinable = false;
3718 }
3719
3720 else if (!function_attribute_inlinable_p (fn))
3721 {
3722 if (do_warning)
3723 warning (OPT_Winline, "function %q+F can never be inlined because it "
3724 "uses attributes conflicting with inlining", fn);
3725 inlinable = false;
3726 }
3727
3728 else if (inline_forbidden_p (fn))
3729 {
3730 /* See if we should warn about uninlinable functions. Previously,
3731 some of these warnings would be issued while trying to expand
3732 the function inline, but that would cause multiple warnings
3733 about functions that would for example call alloca. But since
3734 this a property of the function, just one warning is enough.
3735 As a bonus we can now give more details about the reason why a
3736 function is not inlinable. */
3737 if (always_inline)
3738 error (inline_forbidden_reason, fn);
3739 else if (do_warning)
3740 warning (OPT_Winline, inline_forbidden_reason, fn);
3741
3742 inlinable = false;
3743 }
3744
3745 /* Squirrel away the result so that we don't have to check again. */
3746 DECL_UNINLINABLE (fn) = !inlinable;
3747
3748 return inlinable;
3749 }
3750
3751 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3752 word size, take a possible memcpy call into account, and return the
3753 cost based on whether we optimize for size or speed according to SPEED_P. */
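/* For example (figures are illustrative and target-dependent): with
   MOVE_MAX_PIECES == 16 and MOVE_RATIO (speed_p) == 8, a 40-byte struct
   costs (40 + 16 - 1) / 16 == 3 piecewise moves, while a 200-byte
   struct exceeds 16 * 8 == 128 bytes and is charged the flat memcpy
   cost of 4.  */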
3754
3755 int
3756 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3757 {
3758 HOST_WIDE_INT size;
3759
3760 gcc_assert (!VOID_TYPE_P (type));
3761
3762 if (TREE_CODE (type) == VECTOR_TYPE)
3763 {
3764 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3765 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3766 int orig_mode_size
3767 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3768 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3769 return ((orig_mode_size + simd_mode_size - 1)
3770 / simd_mode_size);
3771 }
3772
3773 size = int_size_in_bytes (type);
3774
3775 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3776 /* Cost of a memcpy call, 3 arguments and the call. */
3777 return 4;
3778 else
3779 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3780 }
3781
3782 /* Returns cost of operation CODE, according to WEIGHTS */
3783
3784 static int
3785 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3786 tree op1 ATTRIBUTE_UNUSED, tree op2)
3787 {
3788 switch (code)
3789 {
3790 /* These are "free" conversions, or their presumed cost
3791 is folded into other operations. */
3792 case RANGE_EXPR:
3793 CASE_CONVERT:
3794 case COMPLEX_EXPR:
3795 case PAREN_EXPR:
3796 case VIEW_CONVERT_EXPR:
3797 return 0;
3798
3799 /* Assign cost of 1 to usual operations.
3800 ??? We may consider mapping RTL costs to this. */
3801 case COND_EXPR:
3802 case VEC_COND_EXPR:
3803 case VEC_PERM_EXPR:
3804
3805 case PLUS_EXPR:
3806 case POINTER_PLUS_EXPR:
3807 case POINTER_DIFF_EXPR:
3808 case MINUS_EXPR:
3809 case MULT_EXPR:
3810 case MULT_HIGHPART_EXPR:
3811
3812 case ADDR_SPACE_CONVERT_EXPR:
3813 case FIXED_CONVERT_EXPR:
3814 case FIX_TRUNC_EXPR:
3815
3816 case NEGATE_EXPR:
3817 case FLOAT_EXPR:
3818 case MIN_EXPR:
3819 case MAX_EXPR:
3820 case ABS_EXPR:
3821 case ABSU_EXPR:
3822
3823 case LSHIFT_EXPR:
3824 case RSHIFT_EXPR:
3825 case LROTATE_EXPR:
3826 case RROTATE_EXPR:
3827
3828 case BIT_IOR_EXPR:
3829 case BIT_XOR_EXPR:
3830 case BIT_AND_EXPR:
3831 case BIT_NOT_EXPR:
3832
3833 case TRUTH_ANDIF_EXPR:
3834 case TRUTH_ORIF_EXPR:
3835 case TRUTH_AND_EXPR:
3836 case TRUTH_OR_EXPR:
3837 case TRUTH_XOR_EXPR:
3838 case TRUTH_NOT_EXPR:
3839
3840 case LT_EXPR:
3841 case LE_EXPR:
3842 case GT_EXPR:
3843 case GE_EXPR:
3844 case EQ_EXPR:
3845 case NE_EXPR:
3846 case ORDERED_EXPR:
3847 case UNORDERED_EXPR:
3848
3849 case UNLT_EXPR:
3850 case UNLE_EXPR:
3851 case UNGT_EXPR:
3852 case UNGE_EXPR:
3853 case UNEQ_EXPR:
3854 case LTGT_EXPR:
3855
3856 case CONJ_EXPR:
3857
3858 case PREDECREMENT_EXPR:
3859 case PREINCREMENT_EXPR:
3860 case POSTDECREMENT_EXPR:
3861 case POSTINCREMENT_EXPR:
3862
3863 case REALIGN_LOAD_EXPR:
3864
3865 case WIDEN_SUM_EXPR:
3866 case WIDEN_MULT_EXPR:
3867 case DOT_PROD_EXPR:
3868 case SAD_EXPR:
3869 case WIDEN_MULT_PLUS_EXPR:
3870 case WIDEN_MULT_MINUS_EXPR:
3871 case WIDEN_LSHIFT_EXPR:
3872
3873 case VEC_WIDEN_MULT_HI_EXPR:
3874 case VEC_WIDEN_MULT_LO_EXPR:
3875 case VEC_WIDEN_MULT_EVEN_EXPR:
3876 case VEC_WIDEN_MULT_ODD_EXPR:
3877 case VEC_UNPACK_HI_EXPR:
3878 case VEC_UNPACK_LO_EXPR:
3879 case VEC_UNPACK_FLOAT_HI_EXPR:
3880 case VEC_UNPACK_FLOAT_LO_EXPR:
3881 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3882 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3883 case VEC_PACK_TRUNC_EXPR:
3884 case VEC_PACK_SAT_EXPR:
3885 case VEC_PACK_FIX_TRUNC_EXPR:
3886 case VEC_PACK_FLOAT_EXPR:
3887 case VEC_WIDEN_LSHIFT_HI_EXPR:
3888 case VEC_WIDEN_LSHIFT_LO_EXPR:
3889 case VEC_DUPLICATE_EXPR:
3890 case VEC_SERIES_EXPR:
3891
3892 return 1;
3893
3894 /* A few special cases of expensive operations. This is useful
3895 to avoid inlining functions having too many of these. */
3896 case TRUNC_DIV_EXPR:
3897 case CEIL_DIV_EXPR:
3898 case FLOOR_DIV_EXPR:
3899 case ROUND_DIV_EXPR:
3900 case EXACT_DIV_EXPR:
3901 case TRUNC_MOD_EXPR:
3902 case CEIL_MOD_EXPR:
3903 case FLOOR_MOD_EXPR:
3904 case ROUND_MOD_EXPR:
3905 case RDIV_EXPR:
3906 if (TREE_CODE (op2) != INTEGER_CST)
3907 return weights->div_mod_cost;
3908 return 1;
3909
3910 /* Bit-field insertion needs several shift and mask operations. */
3911 case BIT_INSERT_EXPR:
3912 return 3;
3913
3914 default:
3915 /* We expect a copy assignment with no operator. */
3916 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3917 return 0;
3918 }
3919 }
3920
3921
3922 /* Estimate number of instructions that will be created by expanding
3923 the statements in the statement sequence STMTS.
3924 WEIGHTS contains weights attributed to various constructs. */
3925
3926 int
3927 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3928 {
3929 int cost;
3930 gimple_stmt_iterator gsi;
3931
3932 cost = 0;
3933 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3934 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3935
3936 return cost;
3937 }
3938
3939
3940 /* Estimate number of instructions that will be created by expanding STMT.
3941 WEIGHTS contains weights attributed to various constructs. */
3942
3943 int
3944 estimate_num_insns (gimple *stmt, eni_weights *weights)
3945 {
3946 unsigned cost, i;
3947 enum gimple_code code = gimple_code (stmt);
3948 tree lhs;
3949 tree rhs;
3950
3951 switch (code)
3952 {
3953 case GIMPLE_ASSIGN:
3954 /* Try to estimate the cost of assignments. We have two cases to
3955 deal with:
3956 1) Simple assignments to registers;
3957 2) Stores to things that must live in memory. This includes
3958 "normal" stores to scalars, but also assignments of large
3959 structures, or constructors of big arrays;
3960
3961 Let us look at these two cases, assuming we have "a = b + C":
3962 <GIMPLE_ASSIGN <var_decl "a">
3963 <plus_expr <var_decl "b"> <constant C>>
3964 If "a" is a GIMPLE register, the assignment to it is free on almost
3965 any target, because "a" usually ends up in a real register. Hence
3966 the only cost of this expression comes from the PLUS_EXPR, and we
3967 can ignore the GIMPLE_ASSIGN.
3968 If "a" is not a GIMPLE register, the assignment to "a" will most
3969 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3970 of moving something into "a", which we compute using the function
3971 estimate_move_cost. */
3972 if (gimple_clobber_p (stmt))
3973 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
3974
3975 lhs = gimple_assign_lhs (stmt);
3976 rhs = gimple_assign_rhs1 (stmt);
3977
3978 cost = 0;
3979
3980 /* Account for the cost of moving to / from memory. */
3981 if (gimple_store_p (stmt))
3982 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
3983 if (gimple_assign_load_p (stmt))
3984 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
3985
3986 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3987 gimple_assign_rhs1 (stmt),
3988 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3989 == GIMPLE_BINARY_RHS
3990 ? gimple_assign_rhs2 (stmt) : NULL);
3991 break;
3992
3993 case GIMPLE_COND:
3994 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3995 gimple_op (stmt, 0),
3996 gimple_op (stmt, 1));
3997 break;
3998
3999 case GIMPLE_SWITCH:
4000 {
4001 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4002 /* Take into account cost of the switch + guess 2 conditional jumps for
4003 each case label.
4004
4005 TODO: once the switch expansion logic is sufficiently separated, we can
4006 do a better job of estimating the cost of the switch. */
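/* Worked example (illustrative): a switch with 16 labels (including
   the default) is charged floor_log2 (16) * 2 == 8 when estimating
   time (a balanced decision tree), but 16 * 2 == 32 when estimating
   size, since the expanded compares and jumps all occupy code space.  */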
4007 if (weights->time_based)
4008 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4009 else
4010 cost = gimple_switch_num_labels (switch_stmt) * 2;
4011 }
4012 break;
4013
4014 case GIMPLE_CALL:
4015 {
4016 tree decl;
4017
4018 if (gimple_call_internal_p (stmt))
4019 return 0;
4020 else if ((decl = gimple_call_fndecl (stmt))
4021 && DECL_BUILT_IN (decl))
4022 {
4023 /* Do not special case builtins where we see the body.
4024 This just confuses the inliner. */
4025 struct cgraph_node *node;
4026 if (!(node = cgraph_node::get (decl))
4027 || node->definition)
4028 ;
4029 /* For builtins that are likely expanded to nothing or
4030 inlined, do not account for operand costs. */
4031 else if (is_simple_builtin (decl))
4032 return 0;
4033 else if (is_inexpensive_builtin (decl))
4034 return weights->target_builtin_call_cost;
4035 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4036 {
4037 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4038 specialize the cheap expansion we do here.
4039 ??? This asks for a more general solution. */
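/* E.g. (illustrative): after canonicalization a source expression
   x * x may reach here as __builtin_pow (x, 2.0); charging it as a
   single multiplication keeps the inliner from over-estimating the
   cost of such calls.  */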
4040 switch (DECL_FUNCTION_CODE (decl))
4041 {
4042 case BUILT_IN_POW:
4043 case BUILT_IN_POWF:
4044 case BUILT_IN_POWL:
4045 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4046 && (real_equal
4047 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4048 &dconst2)))
4049 return estimate_operator_cost
4050 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4051 gimple_call_arg (stmt, 0));
4052 break;
4053
4054 default:
4055 break;
4056 }
4057 }
4058 }
4059
4060 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4061 if (gimple_call_lhs (stmt))
4062 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4063 weights->time_based);
4064 for (i = 0; i < gimple_call_num_args (stmt); i++)
4065 {
4066 tree arg = gimple_call_arg (stmt, i);
4067 cost += estimate_move_cost (TREE_TYPE (arg),
4068 weights->time_based);
4069 }
4070 break;
4071 }
4072
4073 case GIMPLE_RETURN:
4074 return weights->return_cost;
4075
4076 case GIMPLE_GOTO:
4077 case GIMPLE_LABEL:
4078 case GIMPLE_NOP:
4079 case GIMPLE_PHI:
4080 case GIMPLE_PREDICT:
4081 case GIMPLE_DEBUG:
4082 return 0;
4083
4084 case GIMPLE_ASM:
4085 {
4086 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4087 /* 1000 means infinity. This avoids overflows later
4088 with very long asm statements. */
4089 if (count > 1000)
4090 count = 1000;
4091 return MAX (1, count);
4092 }
4093
4094 case GIMPLE_RESX:
4095 /* This is either going to be an external function call with one
4096 argument, or two register copy statements plus a goto. */
4097 return 2;
4098
4099 case GIMPLE_EH_DISPATCH:
4100 /* ??? This is going to turn into a switch statement. Ideally
4101 we'd have a look at the eh region and estimate the number of
4102 edges involved. */
4103 return 10;
4104
4105 case GIMPLE_BIND:
4106 return estimate_num_insns_seq (
4107 gimple_bind_body (as_a <gbind *> (stmt)),
4108 weights);
4109
4110 case GIMPLE_EH_FILTER:
4111 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4112
4113 case GIMPLE_CATCH:
4114 return estimate_num_insns_seq (gimple_catch_handler (
4115 as_a <gcatch *> (stmt)),
4116 weights);
4117
4118 case GIMPLE_TRY:
4119 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4120 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4121
4122 /* OMP directives are generally very expensive. */
4123
4124 case GIMPLE_OMP_RETURN:
4125 case GIMPLE_OMP_SECTIONS_SWITCH:
4126 case GIMPLE_OMP_ATOMIC_STORE:
4127 case GIMPLE_OMP_CONTINUE:
4128 /* ...except these, which are cheap. */
4129 return 0;
4130
4131 case GIMPLE_OMP_ATOMIC_LOAD:
4132 return weights->omp_cost;
4133
4134 case GIMPLE_OMP_FOR:
4135 return (weights->omp_cost
4136 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4137 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4138
4139 case GIMPLE_OMP_PARALLEL:
4140 case GIMPLE_OMP_TASK:
4141 case GIMPLE_OMP_CRITICAL:
4142 case GIMPLE_OMP_MASTER:
4143 case GIMPLE_OMP_TASKGROUP:
4144 case GIMPLE_OMP_ORDERED:
4145 case GIMPLE_OMP_SECTION:
4146 case GIMPLE_OMP_SECTIONS:
4147 case GIMPLE_OMP_SINGLE:
4148 case GIMPLE_OMP_TARGET:
4149 case GIMPLE_OMP_TEAMS:
4150 return (weights->omp_cost
4151 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4152
4153 case GIMPLE_TRANSACTION:
4154 return (weights->tm_cost
4155 + estimate_num_insns_seq (gimple_transaction_body (
4156 as_a <gtransaction *> (stmt)),
4157 weights));
4158
4159 default:
4160 gcc_unreachable ();
4161 }
4162
4163 return cost;
4164 }
4165
4166 /* Estimate number of instructions that will be created by expanding
4167 function FNDECL. WEIGHTS contains weights attributed to various
4168 constructs. */
4169
4170 int
4171 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4172 {
4173 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4174 gimple_stmt_iterator bsi;
4175 basic_block bb;
4176 int n = 0;
4177
4178 gcc_assert (my_function && my_function->cfg);
4179 FOR_EACH_BB_FN (bb, my_function)
4180 {
4181 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4182 n += estimate_num_insns (gsi_stmt (bsi), weights);
4183 }
4184
4185 return n;
4186 }
4187
4188
4189 /* Initializes weights used by estimate_num_insns. */
4190
4191 void
4192 init_inline_once (void)
4193 {
4194 eni_size_weights.call_cost = 1;
4195 eni_size_weights.indirect_call_cost = 3;
4196 eni_size_weights.target_builtin_call_cost = 1;
4197 eni_size_weights.div_mod_cost = 1;
4198 eni_size_weights.omp_cost = 40;
4199 eni_size_weights.tm_cost = 10;
4200 eni_size_weights.time_based = false;
4201 eni_size_weights.return_cost = 1;
4202
4203 /* Estimating time for call is difficult, since we have no idea what the
4204 called function does. In the current uses of eni_time_weights,
4205 underestimating the cost does less harm than overestimating it, so
4206 we choose a rather small value here. */
4207 eni_time_weights.call_cost = 10;
4208 eni_time_weights.indirect_call_cost = 15;
4209 eni_time_weights.target_builtin_call_cost = 1;
4210 eni_time_weights.div_mod_cost = 10;
4211 eni_time_weights.omp_cost = 40;
4212 eni_time_weights.tm_cost = 40;
4213 eni_time_weights.time_based = true;
4214 eni_time_weights.return_cost = 2;
4215 }
4216
4217
4218 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4219
4220 static void
4221 prepend_lexical_block (tree current_block, tree new_block)
4222 {
4223 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4224 BLOCK_SUBBLOCKS (current_block) = new_block;
4225 BLOCK_SUPERCONTEXT (new_block) = current_block;
4226 }
4227
4228 /* Add local variables from CALLEE to CALLER. */
4229
4230 static inline void
4231 add_local_variables (struct function *callee, struct function *caller,
4232 copy_body_data *id)
4233 {
4234 tree var;
4235 unsigned ix;
4236
4237 FOR_EACH_LOCAL_DECL (callee, ix, var)
4238 if (!can_be_nonlocal (var, id))
4239 {
4240 tree new_var = remap_decl (var, id);
4241
4242 /* Remap debug-expressions. */
4243 if (VAR_P (new_var)
4244 && DECL_HAS_DEBUG_EXPR_P (var)
4245 && new_var != var)
4246 {
4247 tree tem = DECL_DEBUG_EXPR (var);
4248 bool old_regimplify = id->regimplify;
4249 id->remapping_type_depth++;
4250 walk_tree (&tem, copy_tree_body_r, id, NULL);
4251 id->remapping_type_depth--;
4252 id->regimplify = old_regimplify;
4253 SET_DECL_DEBUG_EXPR (new_var, tem);
4254 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4255 }
4256 add_local_decl (caller, new_var);
4257 }
4258 }
4259
4260 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4261 have brought in or introduced any debug stmts for SRCVAR. */
4262
4263 static inline void
4264 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4265 {
4266 tree *remappedvarp = id->decl_map->get (srcvar);
4267
4268 if (!remappedvarp)
4269 return;
4270
4271 if (!VAR_P (*remappedvarp))
4272 return;
4273
4274 if (*remappedvarp == id->retvar)
4275 return;
4276
4277 tree tvar = target_for_debug_bind (*remappedvarp);
4278 if (!tvar)
4279 return;
4280
4281 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4282 id->call_stmt);
4283 gimple_seq_add_stmt (bindings, stmt);
4284 }
4285
4286 /* For each inlined variable for which we may have debug bind stmts,
4287 add before GSI a final debug stmt resetting it, marking the end of
4288 its life, so that var-tracking knows it doesn't have to compute
4289 further locations for it. */
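/* E.g. (illustrative GIMPLE dump form): for an inlined local i this
   arranges for a final
	# DEBUG i => NULL
   bind to be emitted before the statements that follow the inlined
   body, so var-tracking stops extending i's location range there.  */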
4290
4291 static inline void
4292 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4293 {
4294 tree var;
4295 unsigned ix;
4296 gimple_seq bindings = NULL;
4297
4298 if (!gimple_in_ssa_p (id->src_cfun))
4299 return;
4300
4301 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4302 return;
4303
4304 for (var = DECL_ARGUMENTS (id->src_fn);
4305 var; var = DECL_CHAIN (var))
4306 reset_debug_binding (id, var, &bindings);
4307
4308 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4309 reset_debug_binding (id, var, &bindings);
4310
4311 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4312 }
4313
4314 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4315
4316 static bool
4317 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4318 {
4319 tree use_retvar;
4320 tree fn;
4321 hash_map<tree, tree> *dst;
4322 hash_map<tree, tree> *st = NULL;
4323 tree return_slot;
4324 tree modify_dest;
4325 struct cgraph_edge *cg_edge;
4326 cgraph_inline_failed_t reason;
4327 basic_block return_block;
4328 edge e;
4329 gimple_stmt_iterator gsi, stmt_gsi;
4330 bool successfully_inlined = false;
4331 bool purge_dead_abnormal_edges;
4332 gcall *call_stmt;
4333 unsigned int prop_mask, src_properties;
4334 struct function *dst_cfun;
4335 tree simduid;
4336 use_operand_p use;
4337 gimple *simtenter_stmt = NULL;
4338 vec<tree> *simtvars_save;
4339
4340 /* The gimplifier uses input_location in too many places, such as
4341 internal_get_tmp_var (). */
4342 location_t saved_location = input_location;
4343 input_location = gimple_location (stmt);
4344
4345 /* From here on, we're only interested in CALL_EXPRs. */
4346 call_stmt = dyn_cast <gcall *> (stmt);
4347 if (!call_stmt)
4348 goto egress;
4349
4350 cg_edge = id->dst_node->get_edge (stmt);
4351 gcc_checking_assert (cg_edge);
4352 /* First, see if we can figure out what function is being called.
4353 If we cannot, then there is no hope of inlining the function. */
4354 if (cg_edge->indirect_unknown_callee)
4355 goto egress;
4356 fn = cg_edge->callee->decl;
4357 gcc_checking_assert (fn);
4358
4359 /* If FN is a declaration of a function in a nested scope that was
4360 globally declared inline, we don't set its DECL_INITIAL.
4361 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4362 C++ front-end uses it for cdtors to refer to their internal
4363 declarations, that are not real functions. Fortunately those
4364 don't have trees to be saved, so we can tell by checking their
4365 gimple_body. */
4366 if (!DECL_INITIAL (fn)
4367 && DECL_ABSTRACT_ORIGIN (fn)
4368 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4369 fn = DECL_ABSTRACT_ORIGIN (fn);
4370
4371 /* Don't try to inline functions that are not well-suited to inlining. */
4372 if (cg_edge->inline_failed)
4373 {
4374 reason = cg_edge->inline_failed;
4375 /* If this call was originally indirect, we do not want to emit any
4376 inlining related warnings or sorry messages because there are no
4377 guarantees regarding those. */
4378 if (cg_edge->indirect_inlining_edge)
4379 goto egress;
4380
4381 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4382 /* For extern inline functions that get redefined we always
4383 silently ignore the always_inline flag. Better behavior would
4384 be to be able to keep both bodies and use the extern inline body
4385 for inlining, but we can't do that because frontends overwrite
4386 the body. */
4387 && !cg_edge->callee->local.redefined_extern_inline
4388 /* During early inline pass, report only when optimization is
4389 not turned on. */
4390 && (symtab->global_info_ready
4391 || !optimize
4392 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4393 /* PR 20090218-1_0.c. Body can be provided by another module. */
4394 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4395 {
4396 error ("inlining failed in call to always_inline %q+F: %s", fn,
4397 cgraph_inline_failed_string (reason));
4398 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4399 inform (gimple_location (stmt), "called from here");
4400 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4401 inform (DECL_SOURCE_LOCATION (cfun->decl),
4402 "called from this function");
4403 }
4404 else if (warn_inline
4405 && DECL_DECLARED_INLINE_P (fn)
4406 && !DECL_NO_INLINE_WARNING_P (fn)
4407 && !DECL_IN_SYSTEM_HEADER (fn)
4408 && reason != CIF_UNSPECIFIED
4409 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4410 /* Do not warn about not inlined recursive calls. */
4411 && !cg_edge->recursive_p ()
4412 /* Avoid warnings during early inline pass. */
4413 && symtab->global_info_ready)
4414 {
4415 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4416 fn, _(cgraph_inline_failed_string (reason))))
4417 {
4418 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4419 inform (gimple_location (stmt), "called from here");
4420 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4421 inform (DECL_SOURCE_LOCATION (cfun->decl),
4422 "called from this function");
4423 }
4424 }
4425 goto egress;
4426 }
4427 id->src_node = cg_edge->callee;
4428
4429 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4430 and redirect to the function being thunked. */
4431 if (id->src_node->thunk.thunk_p)
4432 {
4433 cgraph_edge *edge;
4434 tree virtual_offset = NULL;
4435 profile_count count = cg_edge->count;
4436 tree op;
4437 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4438
4439 cg_edge->remove ();
4440 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4441 gimple_uid (stmt),
4442 profile_count::one (),
4443 profile_count::one (),
4444 true);
4445 edge->count = count;
4446 if (id->src_node->thunk.virtual_offset_p)
4447 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4448 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4449 NULL);
4450 gsi_insert_before (&iter, gimple_build_assign (op,
4451 gimple_call_arg (stmt, 0)),
4452 GSI_NEW_STMT);
4453 gcc_assert (id->src_node->thunk.this_adjusting);
4454 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4455 virtual_offset);
4456
4457 gimple_call_set_arg (stmt, 0, op);
4458 gimple_call_set_fndecl (stmt, edge->callee->decl);
4459 update_stmt (stmt);
4460 id->src_node->remove ();
4461 expand_call_inline (bb, stmt, id);
4462 maybe_remove_unused_call_args (cfun, stmt);
4463 return true;
4464 }
4465 fn = cg_edge->callee->decl;
4466 cg_edge->callee->get_untransformed_body ();
4467
4468 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4469 cg_edge->callee->verify ();
4470
4471 /* We will be inlining this callee. */
4472 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4473 id->assign_stmts.create (0);
4474
4475 /* Update the callers EH personality. */
4476 if (DECL_FUNCTION_PERSONALITY (fn))
4477 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4478 = DECL_FUNCTION_PERSONALITY (fn);
4479
4480 /* Split the block before the GIMPLE_CALL. */
4481 stmt_gsi = gsi_for_stmt (stmt);
4482 gsi_prev (&stmt_gsi);
4483 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4484 bb = e->src;
4485 return_block = e->dest;
4486 remove_edge (e);
4487
4488 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4489 been the source of abnormal edges. In this case, schedule
4490 the removal of dead abnormal edges. */
4491 gsi = gsi_start_bb (return_block);
4492 gsi_next (&gsi);
4493 purge_dead_abnormal_edges = gsi_end_p (gsi);
4494
4495 stmt_gsi = gsi_start_bb (return_block);
4496
4497 /* Build a block containing code to initialize the arguments, the
4498 actual inline expansion of the body, and a label for the return
4499 statements within the function to jump to. The type of the
4500 statement expression is the return type of the function call.
4501 ??? If the call does not have an associated block then we will
4502 remap all callee blocks to NULL, effectively dropping most of
4503 its debug information. This should only happen for calls to
4504 artificial decls inserted by the compiler itself. We need to
4505 either link the inlined blocks into the caller block tree or
4506 not refer to them in any way to not break GC for locations. */
4507 if (gimple_block (stmt))
4508 {
4509 id->block = make_node (BLOCK);
4510 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4511 BLOCK_SOURCE_LOCATION (id->block)
4512 = LOCATION_LOCUS (gimple_location (stmt));
4513 prepend_lexical_block (gimple_block (stmt), id->block);
4514 }
4515
4516 /* Local declarations will be replaced by their equivalents in this map. */
4517 st = id->decl_map;
4518 id->decl_map = new hash_map<tree, tree>;
4519 dst = id->debug_map;
4520 id->debug_map = NULL;
4521
4522 /* Record the function we are about to inline. */
4523 id->src_fn = fn;
4524 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4525 id->reset_location = DECL_IGNORED_P (fn);
4526 id->call_stmt = call_stmt;
4527
4528 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4529 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4530 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4531 simtvars_save = id->dst_simt_vars;
4532 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4533 && (simduid = bb->loop_father->simduid) != NULL_TREE
4534 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4535 && single_imm_use (simduid, &use, &simtenter_stmt)
4536 && is_gimple_call (simtenter_stmt)
4537 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4538 vec_alloc (id->dst_simt_vars, 0);
4539 else
4540 id->dst_simt_vars = NULL;
4541
4542 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4543 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4544
4545 /* If the src function contains an IFN_VA_ARG, then so will the dst
4546 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4547 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4548 src_properties = id->src_cfun->curr_properties & prop_mask;
4549 if (src_properties != prop_mask)
4550 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4551
4552 gcc_assert (!id->src_cfun->after_inlining);
4553
4554 id->entry_bb = bb;
4555 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4556 {
4557 gimple_stmt_iterator si = gsi_last_bb (bb);
4558 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4559 NOT_TAKEN),
4560 GSI_NEW_STMT);
4561 }
4562 initialize_inlined_parameters (id, stmt, fn, bb);
4563 if (debug_nonbind_markers_p && debug_inline_points && id->block
4564 && inlined_function_outer_scope_p (id->block))
4565 {
4566 gimple_stmt_iterator si = gsi_last_bb (bb);
4567 gsi_insert_after (&si, gimple_build_debug_inline_entry
4568 (id->block, input_location), GSI_NEW_STMT);
4569 }
4570
4571 if (DECL_INITIAL (fn))
4572 {
4573 if (gimple_block (stmt))
4574 {
4575 tree *var;
4576
4577 prepend_lexical_block (id->block,
4578 remap_blocks (DECL_INITIAL (fn), id));
4579 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4580 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4581 == NULL_TREE));
4582 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4583 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4584 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4585 under it. The parameters can be then evaluated in the debugger,
4586 but don't show in backtraces. */
4587 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4588 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4589 {
4590 tree v = *var;
4591 *var = TREE_CHAIN (v);
4592 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4593 BLOCK_VARS (id->block) = v;
4594 }
4595 else
4596 var = &TREE_CHAIN (*var);
4597 }
4598 else
4599 remap_blocks_to_null (DECL_INITIAL (fn), id);
4600 }
4601
4602 /* Return statements in the function body will be replaced by jumps
4603 to the RET_LABEL. */
4604 gcc_assert (DECL_INITIAL (fn));
4605 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4606
4607 /* Find the LHS to which the result of this call is assigned. */
4608 return_slot = NULL;
4609 if (gimple_call_lhs (stmt))
4610 {
4611 modify_dest = gimple_call_lhs (stmt);
4612
4613 /* The function which we are inlining might not return a value,
4614 in which case we should issue a warning that the function
4615 does not return a value. In that case the optimizers will
4616 see that the variable to which the value is assigned was not
4617 initialized. We do not want to issue a warning about that
4618 uninitialized variable. */
4619 if (DECL_P (modify_dest))
4620 TREE_NO_WARNING (modify_dest) = 1;
4621
4622 if (gimple_call_return_slot_opt_p (call_stmt))
4623 {
4624 return_slot = modify_dest;
4625 modify_dest = NULL;
4626 }
4627 }
4628 else
4629 modify_dest = NULL;
4630
4631 /* If we are inlining a call to the C++ operator new, we don't want
4632 to use type based alias analysis on the return value. Otherwise
4633 we may get confused if the compiler sees that the inlined new
4634 function returns a pointer which was just deleted. See bug
4635 33407. */
4636 if (DECL_IS_OPERATOR_NEW (fn))
4637 {
4638 return_slot = NULL;
4639 modify_dest = NULL;
4640 }
4641
4642 /* Declare the return variable for the function. */
4643 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4644
4645 /* Add local vars in this inlined callee to caller. */
4646 add_local_variables (id->src_cfun, cfun, id);
4647
4648 if (dump_file && (dump_flags & TDF_DETAILS))
4649 {
4650 fprintf (dump_file, "Inlining %s to %s with frequency %4.2f\n",
4651 id->src_node->dump_name (),
4652 id->dst_node->dump_name (),
4653 cg_edge->sreal_frequency ().to_double ());
4654 id->src_node->dump (dump_file);
4655 id->dst_node->dump (dump_file);
4656 }
4657
4658 /* This is it. Duplicate the callee body. Assume callee is
4659 pre-gimplified. Note that we must not alter the caller
4660 function in any way before this point, as this CALL_EXPR may be
4661 a self-referential call; if we're calling ourselves, we need to
4662 duplicate our body before altering anything. */
4663 copy_body (id, bb, return_block, NULL);
4664
4665 reset_debug_bindings (id, stmt_gsi);
4666
4667 if (flag_stack_reuse != SR_NONE)
4668 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4669 if (!TREE_THIS_VOLATILE (p))
4670 {
4671 tree *varp = id->decl_map->get (p);
4672 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4673 {
4674 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4675 gimple *clobber_stmt;
4676 TREE_THIS_VOLATILE (clobber) = 1;
4677 clobber_stmt = gimple_build_assign (*varp, clobber);
4678 gimple_set_location (clobber_stmt, gimple_location (stmt));
4679 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4680 }
4681 }
4682
4683 /* Reset the escaped solution. */
4684 if (cfun->gimple_df)
4685 pt_solution_reset (&cfun->gimple_df->escaped);
4686
4687 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4688 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4689 {
4690 size_t nargs = gimple_call_num_args (simtenter_stmt);
4691 vec<tree> *vars = id->dst_simt_vars;
4692 auto_vec<tree> newargs (nargs + vars->length ());
4693 for (size_t i = 0; i < nargs; i++)
4694 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4695 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4696 {
4697 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4698 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4699 }
4700 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4701 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4702 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4703 gsi_replace (&gsi, g, false);
4704 }
4705 vec_free (id->dst_simt_vars);
4706 id->dst_simt_vars = simtvars_save;
4707
4708 /* Clean up. */
4709 if (id->debug_map)
4710 {
4711 delete id->debug_map;
4712 id->debug_map = dst;
4713 }
4714 delete id->decl_map;
4715 id->decl_map = st;
4716
4717 /* Unlink the call's virtual operands before replacing it. */
4718 unlink_stmt_vdef (stmt);
4719 if (gimple_vdef (stmt)
4720 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4721 release_ssa_name (gimple_vdef (stmt));
4722
4723 /* If the inlined function returns a result that we care about,
4724 substitute the GIMPLE_CALL with an assignment of the return
4725 variable to the LHS of the call. That is, if STMT was
4726 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4727 if (use_retvar && gimple_call_lhs (stmt))
4728 {
4729 gimple *old_stmt = stmt;
4730 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4731 gimple_set_location (stmt, gimple_location (old_stmt));
4732 gsi_replace (&stmt_gsi, stmt, false);
4733 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4734 /* Append a clobber for id->retvar if easily possible. */
4735 if (flag_stack_reuse != SR_NONE
4736 && id->retvar
4737 && VAR_P (id->retvar)
4738 && id->retvar != return_slot
4739 && id->retvar != modify_dest
4740 && !TREE_THIS_VOLATILE (id->retvar)
4741 && !is_gimple_reg (id->retvar)
4742 && !stmt_ends_bb_p (stmt))
4743 {
4744 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4745 gimple *clobber_stmt;
4746 TREE_THIS_VOLATILE (clobber) = 1;
4747 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4748 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4749 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4750 }
4751 }
4752 else
4753 {
4754 /* Handle the case of inlining a function with no return
4755 statement, which causes the return value to become undefined. */
4756 if (gimple_call_lhs (stmt)
4757 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4758 {
4759 tree name = gimple_call_lhs (stmt);
4760 tree var = SSA_NAME_VAR (name);
4761 tree def = var ? ssa_default_def (cfun, var) : NULL;
4762
4763 if (def)
4764 {
4765 /* If the variable is used undefined, make this name
4766 undefined via a move. */
4767 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4768 gsi_replace (&stmt_gsi, stmt, true);
4769 }
4770 else
4771 {
4772 if (!var)
4773 {
4774 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
4775 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
4776 }
4777 /* Otherwise make this variable undefined. */
4778 gsi_remove (&stmt_gsi, true);
4779 set_ssa_default_def (cfun, var, name);
4780 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4781 }
4782 }
4783 /* Replace with a clobber for id->retvar. */
4784 else if (flag_stack_reuse != SR_NONE
4785 && id->retvar
4786 && VAR_P (id->retvar)
4787 && id->retvar != return_slot
4788 && id->retvar != modify_dest
4789 && !TREE_THIS_VOLATILE (id->retvar)
4790 && !is_gimple_reg (id->retvar))
4791 {
4792 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4793 gimple *clobber_stmt;
4794 TREE_THIS_VOLATILE (clobber) = 1;
4795 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4796 gimple_set_location (clobber_stmt, gimple_location (stmt));
4797 gsi_replace (&stmt_gsi, clobber_stmt, false);
4798 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
4799 }
4800 else
4801 gsi_remove (&stmt_gsi, true);
4802 }
4803
4804 if (purge_dead_abnormal_edges)
4805 {
4806 gimple_purge_dead_eh_edges (return_block);
4807 gimple_purge_dead_abnormal_call_edges (return_block);
4808 }
4809
4810 /* If the value of the new expression is ignored, that's OK. We
4811 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4812 the equivalent inlined version either. */
4813 if (is_gimple_assign (stmt))
4814 {
4815 gcc_assert (gimple_assign_single_p (stmt)
4816 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4817 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4818 }
4819
4820 id->assign_stmts.release ();
4821
4822 /* Output the inlining info for this abstract function, since it has been
4823 inlined. If we don't do this now, we can lose the information about the
4824 variables in the function when the blocks get blown away as soon as we
4825 remove the cgraph node. */
4826 if (gimple_block (stmt))
4827 (*debug_hooks->outlining_inline_function) (fn);
4828
4829 /* Update callgraph if needed. */
4830 cg_edge->callee->remove ();
4831
4832 id->block = NULL_TREE;
4833 id->retvar = NULL_TREE;
4834 successfully_inlined = true;
4835
4836 egress:
4837 input_location = saved_location;
4838 return successfully_inlined;
4839 }
4840
4841 /* Expand call statements reachable from STMT_P.
4842 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4843 in a MODIFY_EXPR. */
4844
4845 static bool
4846 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4847 {
4848 gimple_stmt_iterator gsi;
4849 bool inlined = false;
4850
4851 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4852 {
4853 gimple *stmt = gsi_stmt (gsi);
4854 gsi_prev (&gsi);
4855
4856 if (is_gimple_call (stmt)
4857 && !gimple_call_internal_p (stmt))
4858 inlined |= expand_call_inline (bb, stmt, id);
4859 }
4860
4861 return inlined;
4862 }
4863
4864
4865 /* Walk all basic blocks created after FIRST and try to fold every statement
4866 in the STATEMENTS pointer set. */
4867
4868 static void
4869 fold_marked_statements (int first, hash_set<gimple *> *statements)
4870 {
4871 for (; first < n_basic_blocks_for_fn (cfun); first++)
4872 if (BASIC_BLOCK_FOR_FN (cfun, first))
4873 {
4874 gimple_stmt_iterator gsi;
4875
4876 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4877 !gsi_end_p (gsi);
4878 gsi_next (&gsi))
4879 if (statements->contains (gsi_stmt (gsi)))
4880 {
4881 gimple *old_stmt = gsi_stmt (gsi);
4882 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4883
4884 if (old_decl && DECL_BUILT_IN (old_decl))
4885 {
4886 /* Folding builtins can create multiple instructions,
4887 so we need to look at all of them. */
4888 gimple_stmt_iterator i2 = gsi;
4889 gsi_prev (&i2);
4890 if (fold_stmt (&gsi))
4891 {
4892 gimple *new_stmt;
4893 /* If a builtin at the end of a bb folded into nothing,
4894 the following loop won't work. */
4895 if (gsi_end_p (gsi))
4896 {
4897 cgraph_update_edges_for_call_stmt (old_stmt,
4898 old_decl, NULL);
4899 break;
4900 }
4901 if (gsi_end_p (i2))
4902 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4903 else
4904 gsi_next (&i2);
4905 while (1)
4906 {
4907 new_stmt = gsi_stmt (i2);
4908 update_stmt (new_stmt);
4909 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4910 new_stmt);
4911
4912 if (new_stmt == gsi_stmt (gsi))
4913 {
4914 /* It is okay to check only for the very last
4915 of these statements. If it is a throwing
4916 statement nothing will change. If it isn't
4917 this can remove EH edges. The only problematic
4918 case would be some intermediate stmts throwing
4919 while the last one doesn't; that would mean
4920 we'd have to split the block, which we can't
4921 do here and we'd lose anyway. And as builtins
4922 probably never throw, this all
4923 is moot anyway. */
4924 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4925 new_stmt))
4926 gimple_purge_dead_eh_edges (
4927 BASIC_BLOCK_FOR_FN (cfun, first));
4928 break;
4929 }
4930 gsi_next (&i2);
4931 }
4932 }
4933 }
4934 else if (fold_stmt (&gsi))
4935 {
4936 /* Re-read the statement from GSI as fold_stmt() may
4937 have changed it. */
4938 gimple *new_stmt = gsi_stmt (gsi);
4939 update_stmt (new_stmt);
4940
4941 if (is_gimple_call (old_stmt)
4942 || is_gimple_call (new_stmt))
4943 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4944 new_stmt);
4945
4946 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4947 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4948 first));
4949 }
4950 }
4951 }
4952 }
4953
4954 /* Expand calls to inline functions in the body of FN. */
4955
4956 unsigned int
4957 optimize_inline_calls (tree fn)
4958 {
4959 copy_body_data id;
4960 basic_block bb;
4961 int last = n_basic_blocks_for_fn (cfun);
4962 bool inlined_p = false;
4963
4964 /* Clear out ID. */
4965 memset (&id, 0, sizeof (id));
4966
4967 id.src_node = id.dst_node = cgraph_node::get (fn);
4968 gcc_assert (id.dst_node->definition);
4969 id.dst_fn = fn;
4970 /* Or any functions that aren't finished yet. */
4971 if (current_function_decl)
4972 id.dst_fn = current_function_decl;
4973
4974 id.copy_decl = copy_decl_maybe_to_var;
4975 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4976 id.transform_new_cfg = false;
4977 id.transform_return_to_modify = true;
4978 id.transform_parameter = true;
4979 id.transform_lang_insert_block = NULL;
4980 id.statements_to_fold = new hash_set<gimple *>;
4981
4982 push_gimplify_context ();
4983
4984 /* We make no attempts to keep dominance info up-to-date. */
4985 free_dominance_info (CDI_DOMINATORS);
4986 free_dominance_info (CDI_POST_DOMINATORS);
4987
4988 /* Register specific gimple functions. */
4989 gimple_register_cfg_hooks ();
4990
4991 /* Reach the trees by walking over the CFG, and note the
4992 enclosing basic-blocks in the call edges. */
4993 /* We walk the blocks going forward, because inlined function bodies
4994 will split id->current_basic_block, and the new blocks will
4995 follow it; we'll trudge through them, processing their CALL_EXPRs
4996 along the way. */
4997 FOR_EACH_BB_FN (bb, cfun)
4998 inlined_p |= gimple_expand_calls_inline (bb, &id);
4999
5000 pop_gimplify_context (NULL);
5001
5002 if (flag_checking)
5003 {
5004 struct cgraph_edge *e;
5005
5006 id.dst_node->verify ();
5007
5008 /* Double check that we inlined everything we are supposed to inline. */
5009 for (e = id.dst_node->callees; e; e = e->next_callee)
5010 gcc_assert (e->inline_failed);
5011 }
5012
5013 /* Fold queued statements. */
5014 update_max_bb_count ();
5015 fold_marked_statements (last, id.statements_to_fold);
5016 delete id.statements_to_fold;
5017
5018 gcc_assert (!id.debug_stmts.exists ());
5019
5020 /* If we didn't inline into the function there is nothing to do. */
5021 if (!inlined_p)
5022 return 0;
5023
5024 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5025 number_blocks (fn);
5026
5027 delete_unreachable_blocks_update_callgraph (&id);
5028 if (flag_checking)
5029 id.dst_node->verify ();
5030
5031 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5032 not possible yet - the IPA passes might make various functions not
5033 throw and they don't care to proactively update local EH info. This is
5034 done later in the fixup_cfg pass, which also executes the verification. */
5035 return (TODO_update_ssa
5036 | TODO_cleanup_cfg
5037 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5038 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5039 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5040 ? TODO_rebuild_frequencies : 0));
5041 }
5042
5043 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5044
5045 tree
5046 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5047 {
5048 enum tree_code code = TREE_CODE (*tp);
5049 enum tree_code_class cl = TREE_CODE_CLASS (code);
5050
5051 /* We make copies of most nodes. */
5052 if (IS_EXPR_CODE_CLASS (cl)
5053 || code == TREE_LIST
5054 || code == TREE_VEC
5055 || code == TYPE_DECL
5056 || code == OMP_CLAUSE)
5057 {
5058 /* Because the chain gets clobbered when we make a copy, we save it
5059 here. */
5060 tree chain = NULL_TREE, new_tree;
5061
5062 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5063 chain = TREE_CHAIN (*tp);
5064
5065 /* Copy the node. */
5066 new_tree = copy_node (*tp);
5067
5068 *tp = new_tree;
5069
5070 /* Now, restore the chain, if appropriate. That will cause
5071 walk_tree to walk into the chain as well. */
5072 if (code == PARM_DECL
5073 || code == TREE_LIST
5074 || code == OMP_CLAUSE)
5075 TREE_CHAIN (*tp) = chain;
5076
5077 /* For now, we don't update BLOCKs when we make copies. So, we
5078 have to nullify all BIND_EXPRs. */
5079 if (TREE_CODE (*tp) == BIND_EXPR)
5080 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5081 }
5082 else if (code == CONSTRUCTOR)
5083 {
5084 /* CONSTRUCTOR nodes need special handling because
5085 we need to duplicate the vector of elements. */
5086 tree new_tree;
5087
5088 new_tree = copy_node (*tp);
5089 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5090 *tp = new_tree;
5091 }
5092 else if (code == STATEMENT_LIST)
5093 /* We used to just abort on STATEMENT_LIST, but we can run into them
5094 with statement-expressions (c++/40975). */
5095 copy_statement_list (tp);
5096 else if (TREE_CODE_CLASS (code) == tcc_type)
5097 *walk_subtrees = 0;
5098 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5099 *walk_subtrees = 0;
5100 else if (TREE_CODE_CLASS (code) == tcc_constant)
5101 *walk_subtrees = 0;
5102 return NULL_TREE;
5103 }
5104
5105 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5106 information indicating to what new SAVE_EXPR this one should be mapped,
5107 use that one. Otherwise, create a new node and enter it in ST. FN is
5108 the function into which the copy will be placed. */
5109
5110 static void
5111 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5112 {
5113 tree *n;
5114 tree t;
5115
5116 /* See if we already encountered this SAVE_EXPR. */
5117 n = st->get (*tp);
5118
5119 /* If we didn't already remap this SAVE_EXPR, do so now. */
5120 if (!n)
5121 {
5122 t = copy_node (*tp);
5123
5124 /* Remember this SAVE_EXPR. */
5125 st->put (*tp, t);
5126 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5127 st->put (t, t);
5128 }
5129 else
5130 {
5131 /* We've already walked into this SAVE_EXPR; don't do it again. */
5132 *walk_subtrees = 0;
5133 t = *n;
5134 }
5135
5136 /* Replace this SAVE_EXPR with the copy. */
5137 *tp = t;
5138 }
5139
5140 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5141 label, copies the declaration and enters it in the splay_tree in DATA (which
5142 is really a 'copy_body_data *'). */
5143
5144 static tree
5145 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5146 bool *handled_ops_p ATTRIBUTE_UNUSED,
5147 struct walk_stmt_info *wi)
5148 {
5149 copy_body_data *id = (copy_body_data *) wi->info;
5150 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5151
5152 if (stmt)
5153 {
5154 tree decl = gimple_label_label (stmt);
5155
5156 /* Copy the decl and remember the copy. */
5157 insert_decl_map (id, decl, id->copy_decl (decl, id));
5158 }
5159
5160 return NULL_TREE;
5161 }
5162
5163 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5164 struct walk_stmt_info *wi);
5165
5166 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5167 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5168 remaps all local declarations to appropriate replacements in gimple
5169 operands. */
5170
5171 static tree
5172 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5173 {
5174 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5175 copy_body_data *id = (copy_body_data *) wi->info;
5176 hash_map<tree, tree> *st = id->decl_map;
5177 tree *n;
5178 tree expr = *tp;
5179
5180 /* For recursive invocations this is no longer the LHS itself. */
5181 bool is_lhs = wi->is_lhs;
5182 wi->is_lhs = false;
5183
5184 if (TREE_CODE (expr) == SSA_NAME)
5185 {
5186 *tp = remap_ssa_name (*tp, id);
5187 *walk_subtrees = 0;
5188 if (is_lhs)
5189 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5190 }
5191 /* Only a local declaration (variable or label). */
5192 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5193 || TREE_CODE (expr) == LABEL_DECL)
5194 {
5195 /* Lookup the declaration. */
5196 n = st->get (expr);
5197
5198 /* If it's there, remap it. */
5199 if (n)
5200 *tp = *n;
5201 *walk_subtrees = 0;
5202 }
5203 else if (TREE_CODE (expr) == STATEMENT_LIST
5204 || TREE_CODE (expr) == BIND_EXPR
5205 || TREE_CODE (expr) == SAVE_EXPR)
5206 gcc_unreachable ();
5207 else if (TREE_CODE (expr) == TARGET_EXPR)
5208 {
5209 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5210 It's OK for this to happen if it was part of a subtree that
5211 isn't immediately expanded, such as operand 2 of another
5212 TARGET_EXPR. */
5213 if (!TREE_OPERAND (expr, 1))
5214 {
5215 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5216 TREE_OPERAND (expr, 3) = NULL_TREE;
5217 }
5218 }
5219 else if (TREE_CODE (expr) == OMP_CLAUSE)
5220 {
5221 /* Before the omplower pass completes, some OMP clauses can contain
5222 sequences that are neither copied by gimple_seq_copy nor walked by
5223 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5224 in those situations, we have to copy and process them explicitly. */
5225
5226 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5227 {
5228 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5229 seq = duplicate_remap_omp_clause_seq (seq, wi);
5230 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5231 }
5232 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5233 {
5234 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5235 seq = duplicate_remap_omp_clause_seq (seq, wi);
5236 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5237 }
5238 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5239 {
5240 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5241 seq = duplicate_remap_omp_clause_seq (seq, wi);
5242 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5243 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5244 seq = duplicate_remap_omp_clause_seq (seq, wi);
5245 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5246 }
5247 }
5248
5249 /* Keep iterating. */
5250 return NULL_TREE;
5251 }
5252
5253
5254 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5255 Using the decl map of the copy_body_data passed in WI->info,
5256 remaps all local declarations to appropriate replacements in gimple
5257 statements. */
5258
5259 static tree
5260 replace_locals_stmt (gimple_stmt_iterator *gsip,
5261 bool *handled_ops_p ATTRIBUTE_UNUSED,
5262 struct walk_stmt_info *wi)
5263 {
5264 copy_body_data *id = (copy_body_data *) wi->info;
5265 gimple *gs = gsi_stmt (*gsip);
5266
5267 if (gbind *stmt = dyn_cast <gbind *> (gs))
5268 {
5269 tree block = gimple_bind_block (stmt);
5270
5271 if (block)
5272 {
5273 remap_block (&block, id);
5274 gimple_bind_set_block (stmt, block);
5275 }
5276
5277 /* This will remap a lot of the same decls again, but this should be
5278 harmless. */
5279 if (gimple_bind_vars (stmt))
5280 {
5281 tree old_var, decls = gimple_bind_vars (stmt);
5282
5283 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5284 if (!can_be_nonlocal (old_var, id)
5285 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5286 remap_decl (old_var, id);
5287
5288 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5289 id->prevent_decl_creation_for_types = true;
5290 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5291 id->prevent_decl_creation_for_types = false;
5292 }
5293 }
5294
5295 /* Keep iterating. */
5296 return NULL_TREE;
5297 }
5298
5299 /* Create a copy of SEQ and remap all decls in it. */
5300
5301 static gimple_seq
5302 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5303 {
5304 if (!seq)
5305 return NULL;
5306
5307 /* Any labels in OMP sequences can only be referred to within the sequence
5308 itself, so both the label marking and the remapping can be done here. */
5309 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5310 gimple_seq copy = gimple_seq_copy (seq);
5311 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5312 return copy;
5313 }
5314
5315 /* Copies everything in SEQ and replaces variables and labels local to
5316 current_function_decl. */
5317
5318 gimple_seq
5319 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5320 {
5321 copy_body_data id;
5322 struct walk_stmt_info wi;
5323 gimple_seq copy;
5324
5325 /* There's nothing to do for NULL_TREE. */
5326 if (seq == NULL)
5327 return seq;
5328
5329 /* Set up ID. */
5330 memset (&id, 0, sizeof (id));
5331 id.src_fn = current_function_decl;
5332 id.dst_fn = current_function_decl;
5333 id.src_cfun = cfun;
5334 id.decl_map = new hash_map<tree, tree>;
5335 id.debug_map = NULL;
5336
5337 id.copy_decl = copy_decl_no_change;
5338 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5339 id.transform_new_cfg = false;
5340 id.transform_return_to_modify = false;
5341 id.transform_parameter = false;
5342 id.transform_lang_insert_block = NULL;
5343
5344 /* Walk the tree once to find local labels. */
5345 memset (&wi, 0, sizeof (wi));
5346 hash_set<tree> visited;
5347 wi.info = &id;
5348 wi.pset = &visited;
5349 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5350
5351 copy = gimple_seq_copy (seq);
5352
5353 /* Walk the copy, remapping decls. */
5354 memset (&wi, 0, sizeof (wi));
5355 wi.info = &id;
5356 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5357
5358 /* Clean up. */
5359 delete id.decl_map;
5360 if (id.debug_map)
5361 delete id.debug_map;
5362 if (id.dependence_map)
5363 {
5364 delete id.dependence_map;
5365 id.dependence_map = NULL;
5366 }
5367
5368 return copy;
5369 }
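
/* A minimal usage sketch (hypothetical caller, assuming the sequence is
   already available as a gimple_seq):

       gimple_seq body = gimple_omp_body (stmt);
       gimple_seq copy = copy_gimple_seq_and_replace_locals (body);

   COPY can then be inserted elsewhere in current_function_decl; local
   variables, labels and SSA names are remapped so the two sequences do
   not share any of them.  */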
5370
5371
5372 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5373
5374 static tree
5375 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5376 {
5377 if (*tp == data)
5378 return (tree) data;
5379 else
5380 return NULL;
5381 }
5382
5383 DEBUG_FUNCTION bool
5384 debug_find_tree (tree top, tree search)
5385 {
5386 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5387 }
5388
5389
5390 /* Declare the variables created by the inliner. Add all the variables in
5391 VARS to BLOCK. */
5392
5393 static void
5394 declare_inline_vars (tree block, tree vars)
5395 {
5396 tree t;
5397 for (t = vars; t; t = DECL_CHAIN (t))
5398 {
5399 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5400 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5401 add_local_decl (cfun, t);
5402 }
5403
5404 if (block)
5405 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5406 }
5407
5408 /* Finish up the copy COPY of DECL. The DECL originally was in ID->src_fn,
5409 but the copy will live in ID->dst_fn; fix up its debug info flags,
5410 abstract origin, RTL and DECL_CONTEXT accordingly. */
5411
5412 tree
5413 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5414 {
5415 /* Don't generate debug information for the copy if we wouldn't have
5416 generated it for the original either. */
5417 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5418 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5419
5420 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5421 declaration inspired this copy. */
5422 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5423
5424 /* The new variable/label has no RTL, yet. */
5425 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5426 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5427 SET_DECL_RTL (copy, 0);
5428
5429 /* These args would always appear unused, if not for this. */
5430 TREE_USED (copy) = 1;
5431
5432 /* Set the context for the new declaration. */
5433 if (!DECL_CONTEXT (decl))
5434 /* Globals stay global. */
5435 ;
5436 else if (DECL_CONTEXT (decl) != id->src_fn)
5437 /* Things that weren't in the scope of the function we're inlining
5438 from aren't in the scope we're inlining to, either. */
5439 ;
5440 else if (TREE_STATIC (decl))
5441 /* Function-scoped static variables should stay in the original
5442 function. */
5443 ;
5444 else
5445 {
5446 /* Ordinary automatic local variables are now in the scope of the
5447 new function. */
5448 DECL_CONTEXT (copy) = id->dst_fn;
5449 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5450 {
5451 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5452 DECL_ATTRIBUTES (copy)
5453 = tree_cons (get_identifier ("omp simt private"), NULL,
5454 DECL_ATTRIBUTES (copy));
5455 id->dst_simt_vars->safe_push (copy);
5456 }
5457 }
5458
5459 return copy;
5460 }
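
/* Sketch of the DECL_CONTEXT rules above (illustrative source, assumed
   names):

       int counter;                    // global: DECL_CONTEXT stays NULL
       int f (void)
       {
         static int calls;            // function-scoped static: stays in f
         int tmp = ++calls;           // auto: copy re-parented to dst_fn
         return tmp + counter;
       }

   When duplicating f's body, only the copy of TMP gets its DECL_CONTEXT
   set to the destination function; COUNTER and CALLS keep referring to
   their original declarations.  */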
5461
5462 static tree
5463 copy_decl_to_var (tree decl, copy_body_data *id)
5464 {
5465 tree copy, type;
5466
5467 gcc_assert (TREE_CODE (decl) == PARM_DECL
5468 || TREE_CODE (decl) == RESULT_DECL);
5469
5470 type = TREE_TYPE (decl);
5471
5472 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5473 VAR_DECL, DECL_NAME (decl), type);
5474 if (DECL_PT_UID_SET_P (decl))
5475 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5476 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5477 TREE_READONLY (copy) = TREE_READONLY (decl);
5478 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5479 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5480
5481 return copy_decl_for_dup_finish (id, decl, copy);
5482 }
5483
5484 /* Like copy_decl_to_var, but create a return slot object instead of a
5485 pointer variable for return by invisible reference. */
5486
5487 static tree
5488 copy_result_decl_to_var (tree decl, copy_body_data *id)
5489 {
5490 tree copy, type;
5491
5492 gcc_assert (TREE_CODE (decl) == PARM_DECL
5493 || TREE_CODE (decl) == RESULT_DECL);
5494
5495 type = TREE_TYPE (decl);
5496 if (DECL_BY_REFERENCE (decl))
5497 type = TREE_TYPE (type);
5498
5499 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5500 VAR_DECL, DECL_NAME (decl), type);
5501 if (DECL_PT_UID_SET_P (decl))
5502 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5503 TREE_READONLY (copy) = TREE_READONLY (decl);
5504 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5505 if (!DECL_BY_REFERENCE (decl))
5506 {
5507 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5508 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5509 }
5510
5511 return copy_decl_for_dup_finish (id, decl, copy);
5512 }
5513
5514 tree
5515 copy_decl_no_change (tree decl, copy_body_data *id)
5516 {
5517 tree copy;
5518
5519 copy = copy_node (decl);
5520
5521 /* The COPY is not abstract; it will be generated in DST_FN. */
5522 DECL_ABSTRACT_P (copy) = false;
5523 lang_hooks.dup_lang_specific_decl (copy);
5524
5525 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5526 been taken; it's for internal bookkeeping in expand_goto_internal. */
5527 if (TREE_CODE (copy) == LABEL_DECL)
5528 {
5529 TREE_ADDRESSABLE (copy) = 0;
5530 LABEL_DECL_UID (copy) = -1;
5531 }
5532
5533 return copy_decl_for_dup_finish (id, decl, copy);
5534 }
5535
5536 static tree
5537 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5538 {
5539 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5540 return copy_decl_to_var (decl, id);
5541 else
5542 return copy_decl_no_change (decl, id);
5543 }
5544
5545 /* Return a copy of the function's argument tree. */
5546 static tree
5547 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5548 bitmap args_to_skip, tree *vars)
5549 {
5550 tree arg, *parg;
5551 tree new_parm = NULL;
5552 int i = 0;
5553
5554 parg = &new_parm;
5555
5556 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5557 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5558 {
5559 tree new_tree = remap_decl (arg, id);
5560 if (TREE_CODE (new_tree) != PARM_DECL)
5561 new_tree = id->copy_decl (arg, id);
5562 lang_hooks.dup_lang_specific_decl (new_tree);
5563 *parg = new_tree;
5564 parg = &DECL_CHAIN (new_tree);
5565 }
5566 else if (!id->decl_map->get (arg))
5567 {
5568 /* Make an equivalent VAR_DECL. If the argument was used
5569 as a temporary variable later in the function, the uses will be
5570 replaced by the local variable. */
5571 tree var = copy_decl_to_var (arg, id);
5572 insert_decl_map (id, arg, var);
5573 /* Declare this new variable. */
5574 DECL_CHAIN (var) = *vars;
5575 *vars = var;
5576 }
5577 return new_parm;
5578 }
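
/* Illustrative sketch (assumed example, not from this file): when cloning

       int f (int a, int b, int c)

   with bit 1 set in ARGS_TO_SKIP, the clone keeps remapped PARM_DECLs for
   A and C, while B is not copied as a parameter; instead an equivalent
   VAR_DECL is chained onto *VARS so that any remaining uses of B in the
   copied body still have a declaration to refer to.  */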
5579
5580 /* Return a copy of the function's static chain. */
5581 static tree
5582 copy_static_chain (tree static_chain, copy_body_data * id)
5583 {
5584 tree *chain_copy, *pvar;
5585
5586 chain_copy = &static_chain;
5587 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5588 {
5589 tree new_tree = remap_decl (*pvar, id);
5590 lang_hooks.dup_lang_specific_decl (new_tree);
5591 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5592 *pvar = new_tree;
5593 }
5594 return static_chain;
5595 }
5596
5597 /* Return true if the function is allowed to be versioned.
5598 This is a guard for the versioning functionality. */
5599
5600 bool
5601 tree_versionable_function_p (tree fndecl)
5602 {
5603 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5604 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5605 }
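
/* For reference, versioning of a particular function can be vetoed from
   user source, e.g.

       __attribute__ ((noclone)) int
       keep_me_intact (int x)
       {
         return x + 1;
       }

   makes this predicate return false for that decl, as does any condition
   reported by copy_forbidden.  */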
5606
5607 /* Delete all unreachable basic blocks and update callgraph.
5608 Doing so is somewhat nontrivial because we need to update all clones and
5609 remove inline functions that become unreachable. */
5610
5611 static bool
5612 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5613 {
5614 bool changed = false;
5615 basic_block b, next_bb;
5616
5617 find_unreachable_blocks ();
5618
5619 /* Delete all unreachable basic blocks. */
5620
5621 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5622 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5623 {
5624 next_bb = b->next_bb;
5625
5626 if (!(b->flags & BB_REACHABLE))
5627 {
5628 gimple_stmt_iterator bsi;
5629
5630 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5631 {
5632 struct cgraph_edge *e;
5633 struct cgraph_node *node;
5634
5635 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5636
5637 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5638 && (e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5639 {
5640 if (!e->inline_failed)
5641 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5642 else
5643 e->remove ();
5644 }
5645 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5646 && id->dst_node->clones)
5647 for (node = id->dst_node->clones; node != id->dst_node;)
5648 {
5649 node->remove_stmt_references (gsi_stmt (bsi));
5650 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5651 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5652 {
5653 if (!e->inline_failed)
5654 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5655 else
5656 e->remove ();
5657 }
5658
5659 if (node->clones)
5660 node = node->clones;
5661 else if (node->next_sibling_clone)
5662 node = node->next_sibling_clone;
5663 else
5664 {
5665 while (node != id->dst_node && !node->next_sibling_clone)
5666 node = node->clone_of;
5667 if (node != id->dst_node)
5668 node = node->next_sibling_clone;
5669 }
5670 }
5671 }
5672 delete_basic_block (b);
5673 changed = true;
5674 }
5675 }
5676
5677 return changed;
5678 }
5679
5680 /* Update clone info after duplication. */
5681
5682 static void
5683 update_clone_info (copy_body_data * id)
5684 {
5685 struct cgraph_node *node;
5686 if (!id->dst_node->clones)
5687 return;
5688 for (node = id->dst_node->clones; node != id->dst_node;)
5689 {
5690 /* First update replace maps to match the new body. */
5691 if (node->clone.tree_map)
5692 {
5693 unsigned int i;
5694 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5695 {
5696 struct ipa_replace_map *replace_info;
5697 replace_info = (*node->clone.tree_map)[i];
5698 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5699 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5700 }
5701 }
5702 if (node->clones)
5703 node = node->clones;
5704 else if (node->next_sibling_clone)
5705 node = node->next_sibling_clone;
5706 else
5707 {
5708 while (node != id->dst_node && !node->next_sibling_clone)
5709 node = node->clone_of;
5710 if (node != id->dst_node)
5711 node = node->next_sibling_clone;
5712 }
5713 }
5714 }
5715
5716 /* Create a copy of a function's tree.
5717 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5718 of the original function and the new copied function
5719 respectively. In case we want to replace a DECL
5720 tree with another tree while duplicating the function's
5721 body, TREE_MAP represents the mapping between these
5722 trees. If UPDATE_CLONES is set, the call_stmt fields
5723 of edges of clones of the function will be updated.
5724
5725 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5726 from the new version.
5727 If SKIP_RETURN is true, the new version will return void.
5728 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5729 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5730 */
5731 void
5732 tree_function_versioning (tree old_decl, tree new_decl,
5733 vec<ipa_replace_map *, va_gc> *tree_map,
5734 bool update_clones, bitmap args_to_skip,
5735 bool skip_return, bitmap blocks_to_copy,
5736 basic_block new_entry)
5737 {
5738 struct cgraph_node *old_version_node;
5739 struct cgraph_node *new_version_node;
5740 copy_body_data id;
5741 tree p;
5742 unsigned i;
5743 struct ipa_replace_map *replace_info;
5744 basic_block old_entry_block, bb;
5745 auto_vec<gimple *, 10> init_stmts;
5746 tree vars = NULL_TREE;
5747 bitmap debug_args_to_skip = args_to_skip;
5748
5749 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5750 && TREE_CODE (new_decl) == FUNCTION_DECL);
5751 DECL_POSSIBLY_INLINED (old_decl) = 1;
5752
5753 old_version_node = cgraph_node::get (old_decl);
5754 gcc_checking_assert (old_version_node);
5755 new_version_node = cgraph_node::get (new_decl);
5756 gcc_checking_assert (new_version_node);
5757
5758 /* Copy over debug args. */
5759 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5760 {
5761 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5762 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5763 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5764 old_debug_args = decl_debug_args_lookup (old_decl);
5765 if (old_debug_args)
5766 {
5767 new_debug_args = decl_debug_args_insert (new_decl);
5768 *new_debug_args = vec_safe_copy (*old_debug_args);
5769 }
5770 }
5771
5772 /* Output the inlining info for this abstract function, since it has been
5773 inlined. If we don't do this now, we can lose the information about the
5774 variables in the function when the blocks get blown away as soon as we
5775 remove the cgraph node. */
5776 (*debug_hooks->outlining_inline_function) (old_decl);
5777
5778 DECL_ARTIFICIAL (new_decl) = 1;
5779 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5780 if (DECL_ORIGIN (old_decl) == old_decl)
5781 old_version_node->used_as_abstract_origin = true;
5782 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5783
5784 /* Prepare the data structures for the tree copy. */
5785 memset (&id, 0, sizeof (id));
5786
5787 /* Generate a new name for the new version. */
5788 id.statements_to_fold = new hash_set<gimple *>;
5789
5790 id.decl_map = new hash_map<tree, tree>;
5791 id.debug_map = NULL;
5792 id.src_fn = old_decl;
5793 id.dst_fn = new_decl;
5794 id.src_node = old_version_node;
5795 id.dst_node = new_version_node;
5796 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5797 id.blocks_to_copy = blocks_to_copy;
5798
5799 id.copy_decl = copy_decl_no_change;
5800 id.transform_call_graph_edges
5801 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5802 id.transform_new_cfg = true;
5803 id.transform_return_to_modify = false;
5804 id.transform_parameter = false;
5805 id.transform_lang_insert_block = NULL;
5806
5807 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5808 (DECL_STRUCT_FUNCTION (old_decl));
5809 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5810 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5811 initialize_cfun (new_decl, old_decl,
5812 new_entry ? new_entry->count : old_entry_block->count);
5813 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5814 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5815 = id.src_cfun->gimple_df->ipa_pta;
5816
5817 /* Copy the function's static chain. */
5818 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5819 if (p)
5820 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5821 = copy_static_chain (p, &id);
5822
5823 /* If there's a tree_map, prepare for substitution. */
5824 if (tree_map)
5825 for (i = 0; i < tree_map->length (); i++)
5826 {
5827 gimple *init;
5828 replace_info = (*tree_map)[i];
5829 if (replace_info->replace_p)
5830 {
5831 int parm_num = -1;
5832 if (!replace_info->old_tree)
5833 {
5834 int p = replace_info->parm_num;
5835 tree parm;
5836 tree req_type, new_type;
5837
5838 for (parm = DECL_ARGUMENTS (old_decl); p;
5839 parm = DECL_CHAIN (parm))
5840 p--;
5841 replace_info->old_tree = parm;
5842 parm_num = replace_info->parm_num;
5843 req_type = TREE_TYPE (parm);
5844 new_type = TREE_TYPE (replace_info->new_tree);
5845 if (!useless_type_conversion_p (req_type, new_type))
5846 {
5847 if (fold_convertible_p (req_type, replace_info->new_tree))
5848 replace_info->new_tree
5849 = fold_build1 (NOP_EXPR, req_type,
5850 replace_info->new_tree);
5851 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5852 replace_info->new_tree
5853 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5854 replace_info->new_tree);
5855 else
5856 {
5857 if (dump_file)
5858 {
5859 fprintf (dump_file, " const ");
5860 print_generic_expr (dump_file,
5861 replace_info->new_tree);
5862 fprintf (dump_file,
5863 " can't be converted to param ");
5864 print_generic_expr (dump_file, parm);
5865 fprintf (dump_file, "\n");
5866 }
5867 replace_info->old_tree = NULL;
5868 }
5869 }
5870 }
5871 else
5872 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5873 if (replace_info->old_tree)
5874 {
5875 init = setup_one_parameter (&id, replace_info->old_tree,
5876 replace_info->new_tree, id.src_fn,
5877 NULL,
5878 &vars);
5879 if (init)
5880 init_stmts.safe_push (init);
5881 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
5882 {
5883 if (parm_num == -1)
5884 {
5885 tree parm;
5886 int p;
5887 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5888 parm = DECL_CHAIN (parm), p++)
5889 if (parm == replace_info->old_tree)
5890 {
5891 parm_num = p;
5892 break;
5893 }
5894 }
5895 if (parm_num != -1)
5896 {
5897 if (debug_args_to_skip == args_to_skip)
5898 {
5899 debug_args_to_skip = BITMAP_ALLOC (NULL);
5900 bitmap_copy (debug_args_to_skip, args_to_skip);
5901 }
5902 bitmap_clear_bit (debug_args_to_skip, parm_num);
5903 }
5904 }
5905 }
5906 }
5907 }
5908 /* Copy the function's arguments. */
5909 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5910 DECL_ARGUMENTS (new_decl)
5911 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5912 args_to_skip, &vars);
5913
5914 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5915 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5916
5917 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5918
5919 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5920 /* Add local vars. */
5921 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5922
5923 if (DECL_RESULT (old_decl) == NULL_TREE)
5924 ;
5925 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5926 {
5927 DECL_RESULT (new_decl)
5928 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5929 RESULT_DECL, NULL_TREE, void_type_node);
5930 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5931 cfun->returns_struct = 0;
5932 cfun->returns_pcc_struct = 0;
5933 }
5934 else
5935 {
5936 tree old_name;
5937 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5938 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5939 if (gimple_in_ssa_p (id.src_cfun)
5940 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5941 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5942 {
5943 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5944 insert_decl_map (&id, old_name, new_name);
5945 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5946 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5947 }
5948 }
5949
5950 /* Set up the destination function's loop tree. */
5951 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5952 {
5953 cfun->curr_properties &= ~PROP_loops;
5954 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5955 cfun->curr_properties |= PROP_loops;
5956 }
5957
5958 /* Copy the function's body. */
5959 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5960 new_entry);
5961
5962 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5963 number_blocks (new_decl);
5964
5965 /* We want to create the BB unconditionally, so that the addition of
5966 debug stmts doesn't affect BB count, which may in the end cause
5967 codegen differences. */
5968 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5969 while (init_stmts.length ())
5970 insert_init_stmt (&id, bb, init_stmts.pop ());
5971 update_clone_info (&id);
5972
5973 /* Remap the nonlocal_goto_save_area, if any. */
5974 if (cfun->nonlocal_goto_save_area)
5975 {
5976 struct walk_stmt_info wi;
5977
5978 memset (&wi, 0, sizeof (wi));
5979 wi.info = &id;
5980 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5981 }
5982
5983 /* Clean up. */
5984 delete id.decl_map;
5985 if (id.debug_map)
5986 delete id.debug_map;
5987 free_dominance_info (CDI_DOMINATORS);
5988 free_dominance_info (CDI_POST_DOMINATORS);
5989
5990 update_max_bb_count ();
5991 fold_marked_statements (0, id.statements_to_fold);
5992 delete id.statements_to_fold;
5993 delete_unreachable_blocks_update_callgraph (&id);
5994 if (id.dst_node->definition)
5995 cgraph_edge::rebuild_references ();
5996 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5997 {
5998 calculate_dominance_info (CDI_DOMINATORS);
5999 fix_loop_structure (NULL);
6000 }
6001 update_ssa (TODO_update_ssa);
6002
6003 /* After partial cloning we need to rescale frequencies, so that they are
6004 within the proper range in the cloned function. */
6005 if (new_entry)
6006 {
6007 struct cgraph_edge *e;
6008 rebuild_frequencies ();
6009
6010 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6011 for (e = new_version_node->callees; e; e = e->next_callee)
6012 {
6013 basic_block bb = gimple_bb (e->call_stmt);
6014 e->count = bb->count;
6015 }
6016 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6017 {
6018 basic_block bb = gimple_bb (e->call_stmt);
6019 e->count = bb->count;
6020 }
6021 }
6022
6023 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6024 {
6025 tree parm;
6026 vec<tree, va_gc> **debug_args = NULL;
6027 unsigned int len = 0;
6028 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6029 parm; parm = DECL_CHAIN (parm), i++)
6030 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6031 {
6032 tree ddecl;
6033
6034 if (debug_args == NULL)
6035 {
6036 debug_args = decl_debug_args_insert (new_decl);
6037 len = vec_safe_length (*debug_args);
6038 }
6039 ddecl = make_node (DEBUG_EXPR_DECL);
6040 DECL_ARTIFICIAL (ddecl) = 1;
6041 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6042 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6043 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6044 vec_safe_push (*debug_args, ddecl);
6045 }
6046 if (debug_args != NULL)
6047 {
6048 /* On the callee side, add
6049 DEBUG D#Y s=> parm
6050 DEBUG var => D#Y
6051 stmts to the first bb where var is a VAR_DECL created for the
6052 optimized-away parameter in the DECL_INITIAL block. This hints
6053 in the debug info that var (whose DECL_ORIGIN is the parm
6054 PARM_DECL) is optimized away, but could be looked up at the
6055 call site as value of D#X there. */
6056 tree var = vars, vexpr;
6057 gimple_stmt_iterator cgsi
6058 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6059 gimple *def_temp;
6060 var = vars;
6061 i = vec_safe_length (*debug_args);
6062 do
6063 {
6064 i -= 2;
6065 while (var != NULL_TREE
6066 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6067 var = TREE_CHAIN (var);
6068 if (var == NULL_TREE)
6069 break;
6070 vexpr = make_node (DEBUG_EXPR_DECL);
6071 parm = (**debug_args)[i];
6072 DECL_ARTIFICIAL (vexpr) = 1;
6073 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6074 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6075 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6076 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6077 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6078 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6079 }
6080 while (i > len);
6081 }
6082 }
6083
6084 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6085 BITMAP_FREE (debug_args_to_skip);
6086 free_dominance_info (CDI_DOMINATORS);
6087 free_dominance_info (CDI_POST_DOMINATORS);
6088
6089 gcc_assert (!id.debug_stmts.exists ());
6090 pop_cfun ();
6091 return;
6092 }
6093
6094 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6095 the callee and return the inlined body on success. */
6096
6097 tree
6098 maybe_inline_call_in_expr (tree exp)
6099 {
6100 tree fn = get_callee_fndecl (exp);
6101
6102 /* We can only try to inline "const" functions. */
6103 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6104 {
6105 call_expr_arg_iterator iter;
6106 copy_body_data id;
6107 tree param, arg, t;
6108 hash_map<tree, tree> decl_map;
6109
6110 /* Remap the parameters. */
6111 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6112 param;
6113 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6114 decl_map.put (param, arg);
6115
6116 memset (&id, 0, sizeof (id));
6117 id.src_fn = fn;
6118 id.dst_fn = current_function_decl;
6119 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6120 id.decl_map = &decl_map;
6121
6122 id.copy_decl = copy_decl_no_change;
6123 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6124 id.transform_new_cfg = false;
6125 id.transform_return_to_modify = true;
6126 id.transform_parameter = true;
6127 id.transform_lang_insert_block = NULL;
6128
6129 /* Make sure not to unshare trees behind the front-end's back
6130 since front-end specific mechanisms may rely on sharing. */
6131 id.regimplify = false;
6132 id.do_not_unshare = true;
6133
6134 /* We're not inside any EH region. */
6135 id.eh_lp_nr = 0;
6136
6137 t = copy_tree_body (&id);
6138
6139 /* We can only return something suitable for use in a GENERIC
6140 expression tree. */
6141 if (TREE_CODE (t) == MODIFY_EXPR)
6142 return TREE_OPERAND (t, 1);
6143 }
6144
6145 return NULL_TREE;
6146 }
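
/* Illustrative example of a call this can handle (assumed user code):

       static int add1 (int x) __attribute__ ((const));
       static int add1 (int x) { return x + 1; }

   A GENERIC CALL_EXPR add1 (7) whose callee still has a DECL_SAVED_TREE
   may be replaced by the value operand of the copied body, provided that
   body reduces to a single MODIFY_EXPR of the return variable.  */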
6147
6148 /* Duplicate a type, fields and all. */
6149
6150 tree
6151 build_duplicate_type (tree type)
6152 {
6153 struct copy_body_data id;
6154
6155 memset (&id, 0, sizeof (id));
6156 id.src_fn = current_function_decl;
6157 id.dst_fn = current_function_decl;
6158 id.src_cfun = cfun;
6159 id.decl_map = new hash_map<tree, tree>;
6160 id.debug_map = NULL;
6161 id.copy_decl = copy_decl_no_change;
6162
6163 type = remap_type_1 (type, &id);
6164
6165 delete id.decl_map;
6166 if (id.debug_map)
6167 delete id.debug_map;
6168
6169 TYPE_CANONICAL (type) = type;
6170
6171 return type;
6172 }
6173
6174 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6175 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6176 evaluation. */
6177
6178 tree
6179 copy_fn (tree fn, tree& parms, tree& result)
6180 {
6181 copy_body_data id;
6182 tree param;
6183 hash_map<tree, tree> decl_map;
6184
6185 tree *p = &parms;
6186 *p = NULL_TREE;
6187
6188 memset (&id, 0, sizeof (id));
6189 id.src_fn = fn;
6190 id.dst_fn = current_function_decl;
6191 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6192 id.decl_map = &decl_map;
6193
6194 id.copy_decl = copy_decl_no_change;
6195 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6196 id.transform_new_cfg = false;
6197 id.transform_return_to_modify = false;
6198 id.transform_parameter = true;
6199 id.transform_lang_insert_block = NULL;
6200
6201 /* Make sure not to unshare trees behind the front-end's back
6202 since front-end specific mechanisms may rely on sharing. */
6203 id.regimplify = false;
6204 id.do_not_unshare = true;
6205
6206 /* We're not inside any EH region. */
6207 id.eh_lp_nr = 0;
6208
6209 /* Remap the parameters and result and return them to the caller. */
6210 for (param = DECL_ARGUMENTS (fn);
6211 param;
6212 param = DECL_CHAIN (param))
6213 {
6214 *p = remap_decl (param, &id);
6215 p = &DECL_CHAIN (*p);
6216 }
6217
6218 if (DECL_RESULT (fn))
6219 result = remap_decl (DECL_RESULT (fn), &id);
6220 else
6221 result = NULL_TREE;
6222
6223 return copy_tree_body (&id);
6224 }
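
/* Illustrative C++ source that reaches this path through the C++ front
   end's constexpr evaluator (the caller mentioned above):

       constexpr int sq (int x) { return x * x; }
       static_assert (sq (4) == 16, "");

   Evaluating sq (4) needs an unshared copy of the body together with the
   remapped PARM_DECL for X and the remapped RESULT_DECL, which is exactly
   what this function returns via PARMS and RESULT.  */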