1 /* Tree inlining.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64
65 /* I'm not really happy about this, but we need to handle gimple and
66 non-gimple trees. */
67
68 /* Inlining, Cloning, Versioning, Parallelization
69
70 Inlining: a function body is duplicated, but the PARM_DECLs are
71 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
72 MODIFY_EXPRs that store to a dedicated returned-value variable.
73 The duplicated eh_region info of the copy will later be appended
74 to the info for the caller; the eh_region info in copied throwing
75 statements and RESX statements are adjusted accordingly.
76
77 Cloning: (only in C++) We have one body for a con/de/structor, and
78 multiple function decls, each with a unique parameter list.
79 Duplicate the body, using the given splay tree; some parameters
80 will become constants (like 0 or 1).
81
82 Versioning: a function body is duplicated, and the result is a new
83 function, rather than being copied into blocks of an existing
84 function as with inlining. Some parameters will become constants.
85
86 Parallelization: a region of a function is duplicated resulting in
87 a new function. Variables may be replaced with complex expressions
88 to enable shared variable semantics.
89
90 All of these will simultaneously look up any callgraph edges. If
91 we're going to inline the duplicated function body, and the given
92 function has some cloned callgraph nodes (one for each place this
93 function will be inlined), those callgraph edges will be duplicated.
94 If we're cloning the body, those callgraph edges will be
95 updated to point into the new body. (Note that the original
96 callgraph node and edge list will not be altered.)
97
98 See the CALL_EXPR handling case in copy_tree_body_r (). */
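/* A minimal illustration of the inlining transform described above (a
   hypothetical sketch; the GIMPLE actually produced differs in naming
   and detail).  Inlining

       int sq (int x) { return x * x; }

   at a call site  y = sq (a);  conceptually yields

       x.1 = a;               <- the PARM_DECL remapped to a VAR_DECL
       retval.2 = x.1 * x.1;  <- the RETURN_EXPR became a MODIFY_EXPR
       y = retval.2;          <- store from the returned-value variable

   with the branch semantics of the return handled by CFG edges.  */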
99
100 /* To Do:
101
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
107 are not needed.
108
109 o Provide heuristics to clamp inlining of recursive template
110 calls? */
111
112
113 /* Weights that estimate_num_insns uses to estimate the size of the
114 produced code. */
115
116 eni_weights eni_size_weights;
117
118 /* Weights that estimate_num_insns uses to estimate the time necessary
119 to execute the produced code. */
120
121 eni_weights eni_time_weights;
122
123 /* Prototypes. */
124
125 static tree declare_return_variable (copy_body_data *, tree, tree,
126 basic_block);
127 static void remap_block (tree *, copy_body_data *);
128 static void copy_bind_expr (tree *, int *, copy_body_data *);
129 static void declare_inline_vars (tree, tree);
130 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
131 static void prepend_lexical_block (tree current_block, tree new_block);
132 static tree copy_result_decl_to_var (tree, copy_body_data *);
133 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
134 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
135 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136
137 /* Insert a tree->tree mapping for ID. Although the name suggests
138 that the trees should be variables, it is used for more than that. */
139
140 void
141 insert_decl_map (copy_body_data *id, tree key, tree value)
142 {
143 id->decl_map->put (key, value);
144
145 /* Always insert an identity map as well. If we see this same new
146 node again, we won't want to duplicate it a second time. */
147 if (key != value)
148 id->decl_map->put (value, value);
149 }
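/* A short usage sketch for the mapping above (hypothetical names):

       insert_decl_map (id, old_parm, new_var);

   afterwards id->decl_map contains both old_parm -> new_var and the
   identity entry new_var -> new_var, so if new_var itself is seen
   again during the walk it is not duplicated a second time.  */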
150
151 /* Insert a tree->tree mapping for ID. This is only used for
152 variables. */
153
154 static void
155 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 {
157 if (!gimple_in_ssa_p (id->src_cfun))
158 return;
159
160 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
161 return;
162
163 if (!target_for_debug_bind (key))
164 return;
165
166 gcc_assert (TREE_CODE (key) == PARM_DECL);
167 gcc_assert (VAR_P (value));
168
169 if (!id->debug_map)
170 id->debug_map = new hash_map<tree, tree>;
171
172 id->debug_map->put (key, value);
173 }
174
175 /* If nonzero, we're remapping the contents of inlined debug
176 statements. If negative, an error has occurred, such as a
177 reference to a variable that isn't available in the inlined
178 context. */
179 static int processing_debug_stmt = 0;
180
181 /* Construct new SSA name for old NAME. ID is the inline context. */
182
183 static tree
184 remap_ssa_name (tree name, copy_body_data *id)
185 {
186 tree new_tree, var;
187 tree *n;
188
189 gcc_assert (TREE_CODE (name) == SSA_NAME);
190
191 n = id->decl_map->get (name);
192 if (n)
193 {
194 /* When we perform edge redirection as part of CFG copy, IPA-SRA can
195 remove an unused LHS from a call statement. Such an LHS can however
196 still appear in debug statements, but its value is lost in this
197 function and we do not want to map it. */
198 if (id->killed_new_ssa_names
199 && id->killed_new_ssa_names->contains (*n))
200 {
201 gcc_assert (processing_debug_stmt);
202 processing_debug_stmt = -1;
203 return name;
204 }
205
206 return unshare_expr (*n);
207 }
208
209 if (processing_debug_stmt)
210 {
211 if (SSA_NAME_IS_DEFAULT_DEF (name)
212 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
213 && id->entry_bb == NULL
214 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
215 {
216 tree vexpr = make_node (DEBUG_EXPR_DECL);
217 gimple *def_temp;
218 gimple_stmt_iterator gsi;
219 tree val = SSA_NAME_VAR (name);
220
221 n = id->decl_map->get (val);
222 if (n != NULL)
223 val = *n;
224 if (TREE_CODE (val) != PARM_DECL
225 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
226 {
227 processing_debug_stmt = -1;
228 return name;
229 }
230 n = id->decl_map->get (val);
231 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
232 return *n;
233 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
234 DECL_ARTIFICIAL (vexpr) = 1;
235 TREE_TYPE (vexpr) = TREE_TYPE (name);
236 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
237 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
238 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
239 insert_decl_map (id, val, vexpr);
240 return vexpr;
241 }
242
243 processing_debug_stmt = -1;
244 return name;
245 }
246
247 /* Remap anonymous SSA names or SSA names of anonymous decls. */
248 var = SSA_NAME_VAR (name);
249 if (!var
250 || (!SSA_NAME_IS_DEFAULT_DEF (name)
251 && VAR_P (var)
252 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
253 && DECL_ARTIFICIAL (var)
254 && DECL_IGNORED_P (var)
255 && !DECL_NAME (var)))
256 {
257 struct ptr_info_def *pi;
258 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
259 if (!var && SSA_NAME_IDENTIFIER (name))
260 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
261 insert_decl_map (id, name, new_tree);
262 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
263 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
264 /* At least IPA points-to info can be directly transferred. */
265 if (id->src_cfun->gimple_df
266 && id->src_cfun->gimple_df->ipa_pta
267 && POINTER_TYPE_P (TREE_TYPE (name))
268 && (pi = SSA_NAME_PTR_INFO (name))
269 && !pi->pt.anything)
270 {
271 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
272 new_pi->pt = pi->pt;
273 }
274 /* So can range-info. */
275 if (!POINTER_TYPE_P (TREE_TYPE (name))
276 && SSA_NAME_RANGE_INFO (name))
277 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
278 SSA_NAME_RANGE_INFO (name));
279 return new_tree;
280 }
281
282 /* Do not set DEF_STMT yet, as the statement is not copied yet. We do
283 that in copy_bb. */
284 new_tree = remap_decl (var, id);
285
286 /* We might've substituted a constant or another SSA_NAME for
287 the variable.
288
289 Replace the SSA name representing the RESULT_DECL by the variable
290 during inlining: this saves us from the need to introduce a PHI node
291 in case the return value is only partly initialized. */
292 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
293 && (!SSA_NAME_VAR (name)
294 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
295 || !id->transform_return_to_modify))
296 {
297 struct ptr_info_def *pi;
298 new_tree = make_ssa_name (new_tree);
299 insert_decl_map (id, name, new_tree);
300 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
301 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
302 /* At least IPA points-to info can be directly transferred. */
303 if (id->src_cfun->gimple_df
304 && id->src_cfun->gimple_df->ipa_pta
305 && POINTER_TYPE_P (TREE_TYPE (name))
306 && (pi = SSA_NAME_PTR_INFO (name))
307 && !pi->pt.anything)
308 {
309 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
310 new_pi->pt = pi->pt;
311 }
312 /* So can range-info. */
313 if (!POINTER_TYPE_P (TREE_TYPE (name))
314 && SSA_NAME_RANGE_INFO (name))
315 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
316 SSA_NAME_RANGE_INFO (name));
317 if (SSA_NAME_IS_DEFAULT_DEF (name))
318 {
319 /* By inlining a function having an uninitialized variable, we might
320 extend its lifetime (the variable might get reused). This causes an
321 ICE in the case we end up extending the lifetime of an SSA name
322 across an abnormal edge, but it also increases register pressure.
323
324 We simply initialize all uninitialized vars by 0, except for the
325 case we are inlining to the very first BB. We can avoid this for
326 all BBs that are not inside strongly connected regions of the CFG,
327 but this is expensive to test. */
328 if (id->entry_bb
329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
330 && (!SSA_NAME_VAR (name)
331 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
332 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
333 0)->dest
334 || EDGE_COUNT (id->entry_bb->preds) != 1))
335 {
336 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
337 gimple *init_stmt;
338 tree zero = build_zero_cst (TREE_TYPE (new_tree));
339
340 init_stmt = gimple_build_assign (new_tree, zero);
341 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
342 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
343 }
344 else
345 {
346 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
347 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
348 }
349 }
350 }
351 else
352 insert_decl_map (id, name, new_tree);
353 return new_tree;
354 }
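/* A rough sketch of the cases handled above (hypothetical SSA names):
   an anonymous name _3 is replaced by a fresh name of the remapped
   type; a name x_3 over decl x becomes a fresh name over the remapped
   decl, with points-to and range info copied when available; and an
   uninitialized default definition may instead be given an explicit
   x.5_7 = 0 at the end of id->entry_bb, so that its lifetime is not
   extended across an abnormal edge.  */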
355
356 /* Remap DECL during the copying of the BLOCK tree for the function. */
357
358 tree
359 remap_decl (tree decl, copy_body_data *id)
360 {
361 tree *n;
362
363 /* We only remap local variables in the current function. */
364
365 /* See if we have remapped this declaration. */
366
367 n = id->decl_map->get (decl);
368
369 if (!n && processing_debug_stmt)
370 {
371 processing_debug_stmt = -1;
372 return decl;
373 }
374
375 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
376 necessary DECLs have already been remapped and we do not want to duplicate
377 a decl coming from outside of the sequence we are copying. */
378 if (!n
379 && id->prevent_decl_creation_for_types
380 && id->remapping_type_depth > 0
381 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
382 return decl;
383
384 /* If we didn't already have an equivalent for this declaration, create one
385 now. */
386 if (!n)
387 {
388 /* Make a copy of the variable or label. */
389 tree t = id->copy_decl (decl, id);
390
391 /* Remember it, so that if we encounter this local entity again
392 we can reuse this copy. Do this early because remap_type may
393 need this decl for TYPE_STUB_DECL. */
394 insert_decl_map (id, decl, t);
395
396 if (!DECL_P (t))
397 return t;
398
399 /* Remap types, if necessary. */
400 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
401 if (TREE_CODE (t) == TYPE_DECL)
402 {
403 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
404
405 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
406 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
407 is not set on the TYPE_DECL, for example in LTO mode. */
408 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
409 {
410 tree x = build_variant_type_copy (TREE_TYPE (t));
411 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
412 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
413 DECL_ORIGINAL_TYPE (t) = x;
414 }
415 }
416
417 /* Remap sizes as necessary. */
418 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
419 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
420
421 /* If fields, do likewise for offset and qualifier. */
422 if (TREE_CODE (t) == FIELD_DECL)
423 {
424 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
425 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
426 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
427 }
428
429 return t;
430 }
431
432 if (id->do_not_unshare)
433 return *n;
434 else
435 return unshare_expr (*n);
436 }
437
438 static tree
439 remap_type_1 (tree type, copy_body_data *id)
440 {
441 tree new_tree, t;
442
443 /* We do need a copy. Build and register it now. If this is a pointer
444 or reference type, remap the designated type and make a new pointer
445 or reference type. */
446 if (TREE_CODE (type) == POINTER_TYPE)
447 {
448 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
457 }
458 else if (TREE_CODE (type) == REFERENCE_TYPE)
459 {
460 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
461 TYPE_MODE (type),
462 TYPE_REF_CAN_ALIAS_ALL (type));
463 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
464 new_tree = build_type_attribute_qual_variant (new_tree,
465 TYPE_ATTRIBUTES (type),
466 TYPE_QUALS (type));
467 insert_decl_map (id, type, new_tree);
468 return new_tree;
469 }
470 else
471 new_tree = copy_node (type);
472
473 insert_decl_map (id, type, new_tree);
474
475 /* This is a new type, not a copy of an old type. Need to reassociate
476 variants. We can handle everything except the main variant lazily. */
477 t = TYPE_MAIN_VARIANT (type);
478 if (type != t)
479 {
480 t = remap_type (t, id);
481 TYPE_MAIN_VARIANT (new_tree) = t;
482 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
483 TYPE_NEXT_VARIANT (t) = new_tree;
484 }
485 else
486 {
487 TYPE_MAIN_VARIANT (new_tree) = new_tree;
488 TYPE_NEXT_VARIANT (new_tree) = NULL;
489 }
490
491 if (TYPE_STUB_DECL (type))
492 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
493
494 /* Lazily create pointer and reference types. */
495 TYPE_POINTER_TO (new_tree) = NULL;
496 TYPE_REFERENCE_TO (new_tree) = NULL;
497
498 /* Copy all types that may contain references to local variables; be sure
499 to preserve sharing between a type and its main variant when possible. */
500 switch (TREE_CODE (new_tree))
501 {
502 case INTEGER_TYPE:
503 case REAL_TYPE:
504 case FIXED_POINT_TYPE:
505 case ENUMERAL_TYPE:
506 case BOOLEAN_TYPE:
507 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
508 {
509 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
510 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
511
512 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
513 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
514 }
515 else
516 {
517 t = TYPE_MIN_VALUE (new_tree);
518 if (t && TREE_CODE (t) != INTEGER_CST)
519 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
520
521 t = TYPE_MAX_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
524 }
525 return new_tree;
526
527 case FUNCTION_TYPE:
528 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
529 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
530 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
531 else
532 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
533 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
534 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
535 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
536 else
537 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
538 return new_tree;
539
540 case ARRAY_TYPE:
541 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
542 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
543 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
544 else
545 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
546
547 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
548 {
549 gcc_checking_assert (TYPE_DOMAIN (type)
550 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
551 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
552 }
553 else
554 {
555 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
556 /* For array bounds where we have decided not to copy over the bounds
557 variable that isn't used in the OpenMP/OpenACC region, change them
558 to an uninitialized VAR_DECL temporary. */
559 if (id->adjust_array_error_bounds
560 && TYPE_DOMAIN (new_tree)
561 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
562 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
563 {
564 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
565 DECL_ATTRIBUTES (v)
566 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
567 DECL_ATTRIBUTES (v));
568 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
569 }
570 }
571 break;
572
573 case RECORD_TYPE:
574 case UNION_TYPE:
575 case QUAL_UNION_TYPE:
576 if (TYPE_MAIN_VARIANT (type) != type
577 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
578 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
579 else
580 {
581 tree f, nf = NULL;
582
583 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
584 {
585 t = remap_decl (f, id);
586 DECL_CONTEXT (t) = new_tree;
587 DECL_CHAIN (t) = nf;
588 nf = t;
589 }
590 TYPE_FIELDS (new_tree) = nreverse (nf);
591 }
592 break;
593
594 case OFFSET_TYPE:
595 default:
596 /* Shouldn't have been thought variable sized. */
597 gcc_unreachable ();
598 }
599
600 /* All variants of a type share the same size, so use the already remapped data. */
601 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
602 {
603 tree s = TYPE_SIZE (type);
604 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
605 tree su = TYPE_SIZE_UNIT (type);
606 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
607 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
608 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
609 || s == mvs);
610 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
611 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
612 || su == mvsu);
613 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
614 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
615 }
616 else
617 {
618 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
619 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
620 }
621
622 return new_tree;
623 }
624
625 /* Helper function for remap_type_2, called through walk_tree. */
626
627 static tree
628 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
629 {
630 copy_body_data *id = (copy_body_data *) data;
631
632 if (TYPE_P (*tp))
633 *walk_subtrees = 0;
634
635 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
636 return *tp;
637
638 return NULL_TREE;
639 }
640
641 /* Return true if TYPE needs to be remapped because remap_decl on any
642 needed embedded decl returns something other than that decl. */
643
644 static bool
645 remap_type_2 (tree type, copy_body_data *id)
646 {
647 tree t;
648
649 #define RETURN_TRUE_IF_VAR(T) \
650 do \
651 { \
652 tree _t = (T); \
653 if (_t) \
654 { \
655 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
656 return true; \
657 if (!TYPE_SIZES_GIMPLIFIED (type) \
658 && walk_tree (&_t, remap_type_3, id, NULL)) \
659 return true; \
660 } \
661 } \
662 while (0)
663
664 switch (TREE_CODE (type))
665 {
666 case POINTER_TYPE:
667 case REFERENCE_TYPE:
668 case FUNCTION_TYPE:
669 case METHOD_TYPE:
670 return remap_type_2 (TREE_TYPE (type), id);
671
672 case INTEGER_TYPE:
673 case REAL_TYPE:
674 case FIXED_POINT_TYPE:
675 case ENUMERAL_TYPE:
676 case BOOLEAN_TYPE:
677 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
678 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
679 return false;
680
681 case ARRAY_TYPE:
682 if (remap_type_2 (TREE_TYPE (type), id)
683 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
684 return true;
685 break;
686
687 case RECORD_TYPE:
688 case UNION_TYPE:
689 case QUAL_UNION_TYPE:
690 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
691 if (TREE_CODE (t) == FIELD_DECL)
692 {
693 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
694 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
695 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
696 if (TREE_CODE (type) == QUAL_UNION_TYPE)
697 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
698 }
699 break;
700
701 default:
702 return false;
703 }
704
705 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
706 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
707 return false;
708 #undef RETURN_TRUE_IF_VAR
709 }
710
711 tree
712 remap_type (tree type, copy_body_data *id)
713 {
714 tree *node;
715 tree tmp;
716
717 if (type == NULL)
718 return type;
719
720 /* See if we have remapped this type. */
721 node = id->decl_map->get (type);
722 if (node)
723 return *node;
724
725 /* The type only needs remapping if it's variably modified. */
726 if (! variably_modified_type_p (type, id->src_fn)
727 /* Don't remap if the copy_decl method doesn't always return a new
728 decl, and for all embedded decls returns the passed-in decl. */
729 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
730 {
731 insert_decl_map (id, type, type);
732 return type;
733 }
734
735 id->remapping_type_depth++;
736 tmp = remap_type_1 (type, id);
737 id->remapping_type_depth--;
738
739 return tmp;
740 }
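/* Illustrative sketch (hypothetical declarations): in

       void f (int n) { int a[n]; ... }

   the type of a is variably modified because its domain bound refers
   to a function-local entity, so remap_type_1 builds a fresh
   ARRAY_TYPE whose bound refers to the remapped copy of that entity.
   A fixed type such as int[10] takes the fast path above and is
   mapped to itself.  */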
741
742 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
743
744 static bool
745 can_be_nonlocal (tree decl, copy_body_data *id)
746 {
747 /* We cannot duplicate function decls. */
748 if (TREE_CODE (decl) == FUNCTION_DECL)
749 return true;
750
751 /* Local static vars must be non-local or we get multiple declaration
752 problems. */
753 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
754 return true;
755
756 return false;
757 }
758
759 static tree
760 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
761 copy_body_data *id)
762 {
763 tree old_var;
764 tree new_decls = NULL_TREE;
765
766 /* Remap its variables. */
767 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
768 {
769 tree new_var;
770
771 if (can_be_nonlocal (old_var, id))
772 {
773 /* We need to add this variable to the local decls as otherwise
774 nothing else will do so. */
775 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
776 add_local_decl (cfun, old_var);
777 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
778 && !DECL_IGNORED_P (old_var)
779 && nonlocalized_list)
780 vec_safe_push (*nonlocalized_list, old_var);
781 continue;
782 }
783
784 /* Remap the variable. */
785 new_var = remap_decl (old_var, id);
786
787 /* If we didn't remap this variable, we can't mess with its
788 TREE_CHAIN. If we remapped this variable to the return slot, it's
789 already declared somewhere else, so don't declare it here. */
790
791 if (new_var == id->retvar)
792 ;
793 else if (!new_var)
794 {
795 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
796 && !DECL_IGNORED_P (old_var)
797 && nonlocalized_list)
798 vec_safe_push (*nonlocalized_list, old_var);
799 }
800 else
801 {
802 gcc_assert (DECL_P (new_var));
803 DECL_CHAIN (new_var) = new_decls;
804 new_decls = new_var;
805
806 /* Also copy value-expressions. */
807 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
808 {
809 tree tem = DECL_VALUE_EXPR (new_var);
810 bool old_regimplify = id->regimplify;
811 id->remapping_type_depth++;
812 walk_tree (&tem, copy_tree_body_r, id, NULL);
813 id->remapping_type_depth--;
814 id->regimplify = old_regimplify;
815 SET_DECL_VALUE_EXPR (new_var, tem);
816 }
817 }
818 }
819
820 return nreverse (new_decls);
821 }
822
823 /* Copy the BLOCK to contain remapped versions of the variables
824 therein. And hook the new block into the block-tree. */
825
826 static void
827 remap_block (tree *block, copy_body_data *id)
828 {
829 tree old_block;
830 tree new_block;
831
832 /* Make the new block. */
833 old_block = *block;
834 new_block = make_node (BLOCK);
835 TREE_USED (new_block) = TREE_USED (old_block);
836 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
837 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
838 BLOCK_NONLOCALIZED_VARS (new_block)
839 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
840 *block = new_block;
841
842 /* Remap its variables. */
843 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
844 &BLOCK_NONLOCALIZED_VARS (new_block),
845 id);
846
847 if (id->transform_lang_insert_block)
848 id->transform_lang_insert_block (new_block);
849
850 /* Remember the remapped block. */
851 insert_decl_map (id, old_block, new_block);
852 }
853
854 /* Copy the whole block tree and root it in id->block. */
855
856 static tree
857 remap_blocks (tree block, copy_body_data *id)
858 {
859 tree t;
860 tree new_tree = block;
861
862 if (!block)
863 return NULL;
864
865 remap_block (&new_tree, id);
866 gcc_assert (new_tree != block);
867 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
868 prepend_lexical_block (new_tree, remap_blocks (t, id));
869 /* Blocks are in arbitrary order, but make things slightly prettier by
870 not swapping their order when producing a copy. */
871 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
872 return new_tree;
873 }
874
875 /* Remap the block tree rooted at BLOCK to nothing. */
876
877 static void
878 remap_blocks_to_null (tree block, copy_body_data *id)
879 {
880 tree t;
881 insert_decl_map (id, block, NULL_TREE);
882 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
883 remap_blocks_to_null (t, id);
884 }
885
886 /* Remap the location info pointed to by LOCUS. */
887
888 static location_t
889 remap_location (location_t locus, copy_body_data *id)
890 {
891 if (LOCATION_BLOCK (locus))
892 {
893 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
894 gcc_assert (n);
895 if (*n)
896 return set_block (locus, *n);
897 }
898
899 locus = LOCATION_LOCUS (locus);
900
901 if (locus != UNKNOWN_LOCATION && id->block)
902 return set_block (locus, id->block);
903
904 return locus;
905 }
906
907 static void
908 copy_statement_list (tree *tp)
909 {
910 tree_stmt_iterator oi, ni;
911 tree new_tree;
912
913 new_tree = alloc_stmt_list ();
914 ni = tsi_start (new_tree);
915 oi = tsi_start (*tp);
916 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
917 *tp = new_tree;
918
919 for (; !tsi_end_p (oi); tsi_next (&oi))
920 {
921 tree stmt = tsi_stmt (oi);
922 if (TREE_CODE (stmt) == STATEMENT_LIST)
923 /* This copy is not redundant; tsi_link_after will smash this
924 STATEMENT_LIST into the end of the one we're building, and we
925 don't want to do that with the original. */
926 copy_statement_list (&stmt);
927 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
928 }
929 }
930
931 static void
932 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
933 {
934 tree block = BIND_EXPR_BLOCK (*tp);
935 /* Copy (and replace) the statement. */
936 copy_tree_r (tp, walk_subtrees, NULL);
937 if (block)
938 {
939 remap_block (&block, id);
940 BIND_EXPR_BLOCK (*tp) = block;
941 }
942
943 if (BIND_EXPR_VARS (*tp))
944 /* This will remap a lot of the same decls again, but this should be
945 harmless. */
946 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
947 }
948
949
950 /* Create a new gimple_seq by remapping all the statements in BODY
951 using the inlining information in ID. */
952
953 static gimple_seq
954 remap_gimple_seq (gimple_seq body, copy_body_data *id)
955 {
956 gimple_stmt_iterator si;
957 gimple_seq new_body = NULL;
958
959 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
960 {
961 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
962 gimple_seq_add_seq (&new_body, new_stmts);
963 }
964
965 return new_body;
966 }
967
968
969 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
970 block using the mapping information in ID. */
971
972 static gimple *
973 copy_gimple_bind (gbind *stmt, copy_body_data *id)
974 {
975 gimple *new_bind;
976 tree new_block, new_vars;
977 gimple_seq body, new_body;
978
979 /* Copy the statement. Note that we purposely don't use copy_stmt
980 here because we need to remap statements as we copy. */
981 body = gimple_bind_body (stmt);
982 new_body = remap_gimple_seq (body, id);
983
984 new_block = gimple_bind_block (stmt);
985 if (new_block)
986 remap_block (&new_block, id);
987
988 /* This will remap a lot of the same decls again, but this should be
989 harmless. */
990 new_vars = gimple_bind_vars (stmt);
991 if (new_vars)
992 new_vars = remap_decls (new_vars, NULL, id);
993
994 new_bind = gimple_build_bind (new_vars, new_body, new_block);
995
996 return new_bind;
997 }
998
999 /* Return true if DECL is a parameter or an SSA_NAME for a parameter. */
1000
1001 static bool
1002 is_parm (tree decl)
1003 {
1004 if (TREE_CODE (decl) == SSA_NAME)
1005 {
1006 decl = SSA_NAME_VAR (decl);
1007 if (!decl)
1008 return false;
1009 }
1010
1011 return (TREE_CODE (decl) == PARM_DECL);
1012 }
1013
1014 /* Remap the dependence CLIQUE from the source to the destination function
1015 as specified in ID. */
1016
1017 static unsigned short
1018 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1019 {
1020 if (clique == 0 || processing_debug_stmt)
1021 return 0;
1022 if (!id->dependence_map)
1023 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1024 bool existed;
1025 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1026 if (!existed)
1027 {
1028 /* Clique 1 is reserved for local ones set by PTA. */
1029 if (cfun->last_clique == 0)
1030 cfun->last_clique = 1;
1031 newc = ++cfun->last_clique;
1032 }
1033 return newc;
1034 }
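/* A small worked example (hypothetical numbers): if the source
   function used cliques 2 and 3 and the destination cfun->last_clique
   was 4, the first remap yields 2 -> 5 and the next 3 -> 6, while
   clique 0 (no clique) always stays 0 and clique 1 remains reserved
   for local cliques assigned by PTA.  */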
1035
1036 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1037 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1038 WALK_SUBTREES is used to tell walk_gimple_op whether to keep
1039 recursing into the child nodes of *TP. */
1040
1041 static tree
1042 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1043 {
1044 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1045 copy_body_data *id = (copy_body_data *) wi_p->info;
1046 tree fn = id->src_fn;
1047
1048 /* For recursive invocations this is no longer the LHS itself. */
1049 bool is_lhs = wi_p->is_lhs;
1050 wi_p->is_lhs = false;
1051
1052 if (TREE_CODE (*tp) == SSA_NAME)
1053 {
1054 *tp = remap_ssa_name (*tp, id);
1055 *walk_subtrees = 0;
1056 if (is_lhs)
1057 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1058 return NULL;
1059 }
1060 else if (auto_var_in_fn_p (*tp, fn))
1061 {
1062 /* Local variables and labels need to be replaced by equivalent
1063 variables. We don't want to copy static variables; there's
1064 only one of those, no matter how many times we inline the
1065 containing function. Similarly for globals from an outer
1066 function. */
1067 tree new_decl;
1068
1069 /* Remap the declaration. */
1070 new_decl = remap_decl (*tp, id);
1071 gcc_assert (new_decl);
1072 /* Replace this variable with the copy. */
1073 STRIP_TYPE_NOPS (new_decl);
1074 /* ??? The C++ frontend uses void * pointer zero to initialize
1075 any other type. This confuses the middle-end type verification.
1076 As cloned bodies do not go through gimplification again the fixup
1077 there doesn't trigger. */
1078 if (TREE_CODE (new_decl) == INTEGER_CST
1079 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1080 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1081 *tp = new_decl;
1082 *walk_subtrees = 0;
1083 }
1084 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1085 gcc_unreachable ();
1086 else if (TREE_CODE (*tp) == SAVE_EXPR)
1087 gcc_unreachable ();
1088 else if (TREE_CODE (*tp) == LABEL_DECL
1089 && (!DECL_CONTEXT (*tp)
1090 || decl_function_context (*tp) == id->src_fn))
1091 /* These may need to be remapped for EH handling. */
1092 *tp = remap_decl (*tp, id);
1093 else if (TREE_CODE (*tp) == FIELD_DECL)
1094 {
1095 /* If the enclosing record type is variably_modified_type_p, the field
1096 has already been remapped. Otherwise, it need not be. */
1097 tree *n = id->decl_map->get (*tp);
1098 if (n)
1099 *tp = *n;
1100 *walk_subtrees = 0;
1101 }
1102 else if (TYPE_P (*tp))
1103 /* Types may need remapping as well. */
1104 *tp = remap_type (*tp, id);
1105 else if (CONSTANT_CLASS_P (*tp))
1106 {
1107 /* If this is a constant, we have to copy the node iff the type
1108 will be remapped. copy_tree_r will not copy a constant. */
1109 tree new_type = remap_type (TREE_TYPE (*tp), id);
1110
1111 if (new_type == TREE_TYPE (*tp))
1112 *walk_subtrees = 0;
1113
1114 else if (TREE_CODE (*tp) == INTEGER_CST)
1115 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1116 else
1117 {
1118 *tp = copy_node (*tp);
1119 TREE_TYPE (*tp) = new_type;
1120 }
1121 }
1122 else
1123 {
1124 /* Otherwise, just copy the node. Note that copy_tree_r already
1125 knows not to copy VAR_DECLs, etc., so this is safe. */
1126
1127 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1128 {
1129 /* We need to re-canonicalize MEM_REFs from inline substitutions
1130 that can happen when a pointer argument is an ADDR_EXPR.
1131 Recurse here manually to allow that. */
1132 tree ptr = TREE_OPERAND (*tp, 0);
1133 tree type = remap_type (TREE_TYPE (*tp), id);
1134 tree old = *tp;
1135 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1136 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1137 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1138 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1139 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1140 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1141 {
1142 MR_DEPENDENCE_CLIQUE (*tp)
1143 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1144 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1145 }
1146 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1147 remapped a parameter as the property might be valid only
1148 for the parameter itself. */
1149 if (TREE_THIS_NOTRAP (old)
1150 && (!is_parm (TREE_OPERAND (old, 0))
1151 || (!id->transform_parameter && is_parm (ptr))))
1152 TREE_THIS_NOTRAP (*tp) = 1;
1153 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1154 *walk_subtrees = 0;
1155 return NULL;
1156 }
1157
1158 /* Here is the "usual case". Copy this tree node, and then
1159 tweak some special cases. */
1160 copy_tree_r (tp, walk_subtrees, NULL);
1161
1162 if (TREE_CODE (*tp) != OMP_CLAUSE)
1163 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1164
1165 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1166 {
1167 /* The copied TARGET_EXPR has never been expanded, even if the
1168 original node was expanded already. */
1169 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1170 TREE_OPERAND (*tp, 3) = NULL_TREE;
1171 }
1172 else if (TREE_CODE (*tp) == ADDR_EXPR)
1173 {
1174 /* Variable substitution need not be simple; consider, in
1175 particular, the MEM_REF substitution above. Make sure that
1176 TREE_CONSTANT and friends are up-to-date. */
1177 int invariant = is_gimple_min_invariant (*tp);
1178 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1179 recompute_tree_invariant_for_addr_expr (*tp);
1180
1181 /* If this used to be invariant, but is not any longer,
1182 then regimplification is probably needed. */
1183 if (invariant && !is_gimple_min_invariant (*tp))
1184 id->regimplify = true;
1185
1186 *walk_subtrees = 0;
1187 }
1188 }
1189
1190 /* Update the TREE_BLOCK for the cloned expr. */
1191 if (EXPR_P (*tp))
1192 {
1193 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1194 tree old_block = TREE_BLOCK (*tp);
1195 if (old_block)
1196 {
1197 tree *n;
1198 n = id->decl_map->get (TREE_BLOCK (*tp));
1199 if (n)
1200 new_block = *n;
1201 }
1202 TREE_SET_BLOCK (*tp, new_block);
1203 }
1204
1205 /* Keep iterating. */
1206 return NULL_TREE;
1207 }
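/* Sketch of the MEM_REF re-canonicalization above (hypothetical
   decls): if parameter p was substituted by &a during inlining, the
   copied MEM_REF[p, 0] is rebuilt with fold_build2 as MEM_REF[&a, 0],
   which folding can turn into a direct reference to a rather than
   leaving a *&a-style indirection in the IL.  */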
1208
1209
1210 /* Called through walk_tree when copying a tree body. DATA is really
1211 a `copy_body_data *'. */
1212
1213 tree
1214 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1215 {
1216 copy_body_data *id = (copy_body_data *) data;
1217 tree fn = id->src_fn;
1218 tree new_block;
1219
1220 /* Begin by recognizing trees that we'll completely rewrite for the
1221 inlining context. Our output for these trees is completely
1222 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1223 into an edge). Further down, we'll handle trees that get
1224 duplicated and/or tweaked. */
1225
1226 /* When requested, RETURN_EXPRs should be transformed to just the
1227 contained MODIFY_EXPR. The branch semantics of the return will
1228 be handled elsewhere by manipulating the CFG rather than a statement. */
1229 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1230 {
1231 tree assignment = TREE_OPERAND (*tp, 0);
1232
1233 /* If we're returning something, just turn that into an
1234 assignment into the equivalent of the original RESULT_DECL.
1235 If the "assignment" is just the result decl, the result
1236 decl has already been set (e.g. a recent "foo (&result_decl,
1237 ...)"); just toss the entire RETURN_EXPR. */
1238 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1239 {
1240 /* Replace the RETURN_EXPR with (a copy of) the
1241 MODIFY_EXPR hanging underneath. */
1242 *tp = copy_node (assignment);
1243 }
1244 else /* Else the RETURN_EXPR returns no value. */
1245 {
1246 *tp = NULL;
1247 return (tree) (void *)1;
1248 }
1249 }
1250 else if (TREE_CODE (*tp) == SSA_NAME)
1251 {
1252 *tp = remap_ssa_name (*tp, id);
1253 *walk_subtrees = 0;
1254 return NULL;
1255 }
1256
1257 /* Local variables and labels need to be replaced by equivalent
1258 variables. We don't want to copy static variables; there's only
1259 one of those, no matter how many times we inline the containing
1260 function. Similarly for globals from an outer function. */
1261 else if (auto_var_in_fn_p (*tp, fn))
1262 {
1263 tree new_decl;
1264
1265 /* Remap the declaration. */
1266 new_decl = remap_decl (*tp, id);
1267 gcc_assert (new_decl);
1268 /* Replace this variable with the copy. */
1269 STRIP_TYPE_NOPS (new_decl);
1270 *tp = new_decl;
1271 *walk_subtrees = 0;
1272 }
1273 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1274 copy_statement_list (tp);
1275 else if (TREE_CODE (*tp) == SAVE_EXPR
1276 || TREE_CODE (*tp) == TARGET_EXPR)
1277 remap_save_expr (tp, id->decl_map, walk_subtrees);
1278 else if (TREE_CODE (*tp) == LABEL_DECL
1279 && (! DECL_CONTEXT (*tp)
1280 || decl_function_context (*tp) == id->src_fn))
1281 /* These may need to be remapped for EH handling. */
1282 *tp = remap_decl (*tp, id);
1283 else if (TREE_CODE (*tp) == BIND_EXPR)
1284 copy_bind_expr (tp, walk_subtrees, id);
1285 /* Types may need remapping as well. */
1286 else if (TYPE_P (*tp))
1287 *tp = remap_type (*tp, id);
1288
1289 /* If this is a constant, we have to copy the node iff the type will be
1290 remapped. copy_tree_r will not copy a constant. */
1291 else if (CONSTANT_CLASS_P (*tp))
1292 {
1293 tree new_type = remap_type (TREE_TYPE (*tp), id);
1294
1295 if (new_type == TREE_TYPE (*tp))
1296 *walk_subtrees = 0;
1297
1298 else if (TREE_CODE (*tp) == INTEGER_CST)
1299 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1300 else
1301 {
1302 *tp = copy_node (*tp);
1303 TREE_TYPE (*tp) = new_type;
1304 }
1305 }
1306
1307 /* Otherwise, just copy the node. Note that copy_tree_r already
1308 knows not to copy VAR_DECLs, etc., so this is safe. */
1309 else
1310 {
1311 /* Here we handle trees that are not completely rewritten.
1312 First we detect some inlining-induced bogosities for
1313 discarding. */
1314 if (TREE_CODE (*tp) == MODIFY_EXPR
1315 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1316 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1317 {
1318 /* Some assignments VAR = VAR; don't generate any rtl code
1319 and thus don't count as variable modification. Avoid
1320 keeping bogosities like 0 = 0. */
1321 tree decl = TREE_OPERAND (*tp, 0), value;
1322 tree *n;
1323
1324 n = id->decl_map->get (decl);
1325 if (n)
1326 {
1327 value = *n;
1328 STRIP_TYPE_NOPS (value);
1329 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1330 {
1331 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1332 return copy_tree_body_r (tp, walk_subtrees, data);
1333 }
1334 }
1335 }
1336 else if (TREE_CODE (*tp) == INDIRECT_REF)
1337 {
1338 /* Get rid of *& from inline substitutions that can happen when a
1339 pointer argument is an ADDR_EXPR. */
1340 tree decl = TREE_OPERAND (*tp, 0);
1341 tree *n = id->decl_map->get (decl);
1342 if (n)
1343 {
1344 /* If we happen to get an ADDR_EXPR in n->value, strip
1345 it manually here as we'll eventually get ADDR_EXPRs
1346 which lie about the types they point to. In this case
1347 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1348 but we absolutely rely on that. As fold_indirect_ref
1349 does other useful transformations, try that first, though. */
1350 tree type = TREE_TYPE (*tp);
1351 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1352 tree old = *tp;
1353 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1354 if (! *tp)
1355 {
1356 type = remap_type (type, id);
1357 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1358 {
1359 *tp
1360 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1361 /* ??? We should either assert here or build
1362 a VIEW_CONVERT_EXPR instead of blindly leaking
1363 incompatible types to our IL. */
1364 if (! *tp)
1365 *tp = TREE_OPERAND (ptr, 0);
1366 }
1367 else
1368 {
1369 *tp = build1 (INDIRECT_REF, type, ptr);
1370 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1371 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1372 TREE_READONLY (*tp) = TREE_READONLY (old);
1373 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1374 have remapped a parameter as the property might be
1375 valid only for the parameter itself. */
1376 if (TREE_THIS_NOTRAP (old)
1377 && (!is_parm (TREE_OPERAND (old, 0))
1378 || (!id->transform_parameter && is_parm (ptr))))
1379 TREE_THIS_NOTRAP (*tp) = 1;
1380 }
1381 }
1382 *walk_subtrees = 0;
1383 return NULL;
1384 }
1385 }
1386 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1387 {
1388 /* We need to re-canonicalize MEM_REFs from inline substitutions
1389 that can happen when a pointer argument is an ADDR_EXPR.
1390 Recurse here manually to allow that. */
1391 tree ptr = TREE_OPERAND (*tp, 0);
1392 tree type = remap_type (TREE_TYPE (*tp), id);
1393 tree old = *tp;
1394 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1395 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1396 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1397 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1398 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1399 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1400 {
1401 MR_DEPENDENCE_CLIQUE (*tp)
1402 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1403 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1404 }
1405 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1406 remapped a parameter as the property might be valid only
1407 for the parameter itself. */
1408 if (TREE_THIS_NOTRAP (old)
1409 && (!is_parm (TREE_OPERAND (old, 0))
1410 || (!id->transform_parameter && is_parm (ptr))))
1411 TREE_THIS_NOTRAP (*tp) = 1;
1412 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1413 *walk_subtrees = 0;
1414 return NULL;
1415 }
1416
1417 /* Here is the "usual case". Copy this tree node, and then
1418 tweak some special cases. */
1419 copy_tree_r (tp, walk_subtrees, NULL);
1420
1421 /* If EXPR has a block defined, map it to the newly constructed block.
1422 When inlining we want EXPRs without a block to appear in the block
1423 of the function call if we are not remapping a type. */
1424 if (EXPR_P (*tp))
1425 {
1426 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1427 if (TREE_BLOCK (*tp))
1428 {
1429 tree *n;
1430 n = id->decl_map->get (TREE_BLOCK (*tp));
1431 if (n)
1432 new_block = *n;
1433 }
1434 TREE_SET_BLOCK (*tp, new_block);
1435 }
1436
1437 if (TREE_CODE (*tp) != OMP_CLAUSE)
1438 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1439
1440 /* The copied TARGET_EXPR has never been expanded, even if the
1441 original node was expanded already. */
1442 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1443 {
1444 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1445 TREE_OPERAND (*tp, 3) = NULL_TREE;
1446 }
1447
1448 /* Variable substitution need not be simple; consider, in particular,
1449 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1450 and friends are up-to-date. */
1451 else if (TREE_CODE (*tp) == ADDR_EXPR)
1452 {
1453 int invariant = is_gimple_min_invariant (*tp);
1454 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1455
1456 /* Handle the case where we substituted an INDIRECT_REF
1457 into the operand of the ADDR_EXPR. */
1458 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1459 && !id->do_not_fold)
1460 {
1461 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1462 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1463 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1464 *tp = t;
1465 }
1466 else
1467 recompute_tree_invariant_for_addr_expr (*tp);
1468
1469 /* If this used to be invariant, but is not any longer,
1470 then regimplification is probably needed. */
1471 if (invariant && !is_gimple_min_invariant (*tp))
1472 id->regimplify = true;
1473
1474 *walk_subtrees = 0;
1475 }
1476 }
1477
1478 /* Keep iterating. */
1479 return NULL_TREE;
1480 }
1481
1482 /* Helper for remap_gimple_stmt. Given an EH region number for the
1483 source function, map that to the duplicate EH region number in
1484 the destination function. */
1485
1486 static int
1487 remap_eh_region_nr (int old_nr, copy_body_data *id)
1488 {
1489 eh_region old_r, new_r;
1490
1491 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1492 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1493
1494 return new_r->index;
1495 }
1496
1497 /* Similar, but operate on INTEGER_CSTs. */
1498
1499 static tree
1500 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1501 {
1502 int old_nr, new_nr;
1503
1504 old_nr = tree_to_shwi (old_t_nr);
1505 new_nr = remap_eh_region_nr (old_nr, id);
1506
1507 return build_int_cst (integer_type_node, new_nr);
1508 }
1509
1510 /* Helper for copy_bb. Remap statement STMT using the inlining
1511 information in ID. Return the new statement copy. */
1512
1513 static gimple_seq
1514 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1515 {
1516 gimple *copy = NULL;
1517 struct walk_stmt_info wi;
1518 bool skip_first = false;
1519 gimple_seq stmts = NULL;
1520
1521 if (is_gimple_debug (stmt)
1522 && (gimple_debug_nonbind_marker_p (stmt)
1523 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1524 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1525 return NULL;
1526
1527 /* Begin by recognizing trees that we'll completely rewrite for the
1528 inlining context. Our output for these trees is completely
1529 different from our input (e.g. RETURN_EXPR is deleted and morphs
1530 into an edge). Further down, we'll handle trees that get
1531 duplicated and/or tweaked. */
1532
1533 /* When requested, GIMPLE_RETURN should be transformed to just the
1534 contained GIMPLE_ASSIGN. The branch semantics of the return will
1535 be handled elsewhere by manipulating the CFG rather than the
1536 statement. */
1537 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1538 {
1539 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1540
1541 /* If we're returning something, just turn that into an
1542 assignment to the equivalent of the original RESULT_DECL.
1543 If RETVAL is just the result decl, the result decl has
1544 already been set (e.g. a recent "foo (&result_decl, ...)");
1545 just toss the entire GIMPLE_RETURN. Likewise for when the
1546 call doesn't want the return value. */
1547 if (retval
1548 && (TREE_CODE (retval) != RESULT_DECL
1549 && (!id->call_stmt
1550 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1551 && (TREE_CODE (retval) != SSA_NAME
1552 || ! SSA_NAME_VAR (retval)
1553 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1554 {
1555 copy = gimple_build_assign (id->do_not_unshare
1556 ? id->retvar : unshare_expr (id->retvar),
1557 retval);
1558 /* id->retvar is already substituted. Skip it on later remapping. */
1559 skip_first = true;
1560 }
1561 else
1562 return NULL;
1563 }
1564 else if (gimple_has_substatements (stmt))
1565 {
1566 gimple_seq s1, s2;
1567
1568 /* When cloning bodies from the C++ front end, we will be handed bodies
1569 in High GIMPLE form. Handle here all the High GIMPLE statements that
1570 have embedded statements. */
1571 switch (gimple_code (stmt))
1572 {
1573 case GIMPLE_BIND:
1574 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1575 break;
1576
1577 case GIMPLE_CATCH:
1578 {
1579 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1580 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1581 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1582 }
1583 break;
1584
1585 case GIMPLE_EH_FILTER:
1586 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1587 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1588 break;
1589
1590 case GIMPLE_TRY:
1591 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1592 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1593 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1594 break;
1595
1596 case GIMPLE_WITH_CLEANUP_EXPR:
1597 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1598 copy = gimple_build_wce (s1);
1599 break;
1600
1601 case GIMPLE_OMP_PARALLEL:
1602 {
1603 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1604 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1605 copy = gimple_build_omp_parallel
1606 (s1,
1607 gimple_omp_parallel_clauses (omp_par_stmt),
1608 gimple_omp_parallel_child_fn (omp_par_stmt),
1609 gimple_omp_parallel_data_arg (omp_par_stmt));
1610 }
1611 break;
1612
1613 case GIMPLE_OMP_TASK:
1614 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1615 copy = gimple_build_omp_task
1616 (s1,
1617 gimple_omp_task_clauses (stmt),
1618 gimple_omp_task_child_fn (stmt),
1619 gimple_omp_task_data_arg (stmt),
1620 gimple_omp_task_copy_fn (stmt),
1621 gimple_omp_task_arg_size (stmt),
1622 gimple_omp_task_arg_align (stmt));
1623 break;
1624
1625 case GIMPLE_OMP_FOR:
1626 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1627 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1628 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1629 gimple_omp_for_clauses (stmt),
1630 gimple_omp_for_collapse (stmt), s2);
1631 {
1632 size_t i;
1633 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1634 {
1635 gimple_omp_for_set_index (copy, i,
1636 gimple_omp_for_index (stmt, i));
1637 gimple_omp_for_set_initial (copy, i,
1638 gimple_omp_for_initial (stmt, i));
1639 gimple_omp_for_set_final (copy, i,
1640 gimple_omp_for_final (stmt, i));
1641 gimple_omp_for_set_incr (copy, i,
1642 gimple_omp_for_incr (stmt, i));
1643 gimple_omp_for_set_cond (copy, i,
1644 gimple_omp_for_cond (stmt, i));
1645 }
1646 }
1647 break;
1648
1649 case GIMPLE_OMP_MASTER:
1650 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1651 copy = gimple_build_omp_master (s1);
1652 break;
1653
1654 case GIMPLE_OMP_TASKGROUP:
1655 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1656 copy = gimple_build_omp_taskgroup
1657 (s1, gimple_omp_taskgroup_clauses (stmt));
1658 break;
1659
1660 case GIMPLE_OMP_ORDERED:
1661 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1662 copy = gimple_build_omp_ordered
1663 (s1,
1664 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1665 break;
1666
1667 case GIMPLE_OMP_SCAN:
1668 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1669 copy = gimple_build_omp_scan
1670 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1671 break;
1672
1673 case GIMPLE_OMP_SECTION:
1674 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1675 copy = gimple_build_omp_section (s1);
1676 break;
1677
1678 case GIMPLE_OMP_SECTIONS:
1679 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1680 copy = gimple_build_omp_sections
1681 (s1, gimple_omp_sections_clauses (stmt));
1682 break;
1683
1684 case GIMPLE_OMP_SINGLE:
1685 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1686 copy = gimple_build_omp_single
1687 (s1, gimple_omp_single_clauses (stmt));
1688 break;
1689
1690 case GIMPLE_OMP_TARGET:
1691 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1692 copy = gimple_build_omp_target
1693 (s1, gimple_omp_target_kind (stmt),
1694 gimple_omp_target_clauses (stmt));
1695 break;
1696
1697 case GIMPLE_OMP_TEAMS:
1698 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1699 copy = gimple_build_omp_teams
1700 (s1, gimple_omp_teams_clauses (stmt));
1701 break;
1702
1703 case GIMPLE_OMP_CRITICAL:
1704 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1705 copy = gimple_build_omp_critical (s1,
1706 gimple_omp_critical_name
1707 (as_a <gomp_critical *> (stmt)),
1708 gimple_omp_critical_clauses
1709 (as_a <gomp_critical *> (stmt)));
1710 break;
1711
1712 case GIMPLE_TRANSACTION:
1713 {
1714 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1715 gtransaction *new_trans_stmt;
1716 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1717 id);
1718 copy = new_trans_stmt = gimple_build_transaction (s1);
1719 gimple_transaction_set_subcode (new_trans_stmt,
1720 gimple_transaction_subcode (old_trans_stmt));
1721 gimple_transaction_set_label_norm (new_trans_stmt,
1722 gimple_transaction_label_norm (old_trans_stmt));
1723 gimple_transaction_set_label_uninst (new_trans_stmt,
1724 gimple_transaction_label_uninst (old_trans_stmt));
1725 gimple_transaction_set_label_over (new_trans_stmt,
1726 gimple_transaction_label_over (old_trans_stmt));
1727 }
1728 break;
1729
1730 default:
1731 gcc_unreachable ();
1732 }
1733 }
1734 else
1735 {
1736 if (gimple_assign_copy_p (stmt)
1737 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1738 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1739 {
1740 /* Here we handle statements that are not completely rewritten.
1741 First we detect some inlining-induced bogosities for
1742 discarding. */
1743
1744 /* Some assignments VAR = VAR; don't generate any rtl code
1745 and thus don't count as variable modification. Avoid
1746 keeping bogosities like 0 = 0. */
1747 tree decl = gimple_assign_lhs (stmt), value;
1748 tree *n;
1749
1750 n = id->decl_map->get (decl);
1751 if (n)
1752 {
1753 value = *n;
1754 STRIP_TYPE_NOPS (value);
1755 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1756 return NULL;
1757 }
1758 }
1759
1760 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1761 in a block that we aren't copying during tree_function_versioning,
1762 just drop the clobber stmt. */
1763 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1764 {
1765 tree lhs = gimple_assign_lhs (stmt);
1766 if (TREE_CODE (lhs) == MEM_REF
1767 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1768 {
1769 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1770 if (gimple_bb (def_stmt)
1771 && !bitmap_bit_p (id->blocks_to_copy,
1772 gimple_bb (def_stmt)->index))
1773 return NULL;
1774 }
1775 }
1776
1777 /* We do not allow CLOBBERs of handled components. In case the
1778 returned value is stored via such a handled component, remove
1779 the clobber so the stmt verifier is happy. */
1780 if (gimple_clobber_p (stmt)
1781 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1782 {
1783 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1784 if (!DECL_P (remapped)
1785 && TREE_CODE (remapped) != MEM_REF)
1786 return NULL;
1787 }
1788
1789 if (gimple_debug_bind_p (stmt))
1790 {
1791 gdebug *copy
1792 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1793 gimple_debug_bind_get_value (stmt),
1794 stmt);
1795 if (id->reset_location)
1796 gimple_set_location (copy, input_location);
1797 id->debug_stmts.safe_push (copy);
1798 gimple_seq_add_stmt (&stmts, copy);
1799 return stmts;
1800 }
1801 if (gimple_debug_source_bind_p (stmt))
1802 {
1803 gdebug *copy = gimple_build_debug_source_bind
1804 (gimple_debug_source_bind_get_var (stmt),
1805 gimple_debug_source_bind_get_value (stmt),
1806 stmt);
1807 if (id->reset_location)
1808 gimple_set_location (copy, input_location);
1809 id->debug_stmts.safe_push (copy);
1810 gimple_seq_add_stmt (&stmts, copy);
1811 return stmts;
1812 }
1813 if (gimple_debug_nonbind_marker_p (stmt))
1814 {
1815 /* If the inlined function has too many debug markers,
1816 don't copy them. */
1817 if (id->src_cfun->debug_marker_count
1818 > param_max_debug_marker_count)
1819 return stmts;
1820
1821 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1822 if (id->reset_location)
1823 gimple_set_location (copy, input_location);
1824 id->debug_stmts.safe_push (copy);
1825 gimple_seq_add_stmt (&stmts, copy);
1826 return stmts;
1827 }
1828
1829 /* Create a new deep copy of the statement. */
1830 copy = gimple_copy (stmt);
1831
1832 /* Clear flags that need revisiting. */
1833 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1834 {
1835 if (gimple_call_tail_p (call_stmt))
1836 gimple_call_set_tail (call_stmt, false);
1837 if (gimple_call_from_thunk_p (call_stmt))
1838 gimple_call_set_from_thunk (call_stmt, false);
1839 if (gimple_call_internal_p (call_stmt))
1840 switch (gimple_call_internal_fn (call_stmt))
1841 {
1842 case IFN_GOMP_SIMD_LANE:
1843 case IFN_GOMP_SIMD_VF:
1844 case IFN_GOMP_SIMD_LAST_LANE:
1845 case IFN_GOMP_SIMD_ORDERED_START:
1846 case IFN_GOMP_SIMD_ORDERED_END:
1847 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1848 break;
1849 default:
1850 break;
1851 }
1852 }
1853
1854 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1855 RESX and EH_DISPATCH. */
1856 if (id->eh_map)
1857 switch (gimple_code (copy))
1858 {
1859 case GIMPLE_CALL:
1860 {
1861 tree r, fndecl = gimple_call_fndecl (copy);
1862 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1863 switch (DECL_FUNCTION_CODE (fndecl))
1864 {
1865 case BUILT_IN_EH_COPY_VALUES:
1866 r = gimple_call_arg (copy, 1);
1867 r = remap_eh_region_tree_nr (r, id);
1868 gimple_call_set_arg (copy, 1, r);
1869 /* FALLTHRU */
1870
1871 case BUILT_IN_EH_POINTER:
1872 case BUILT_IN_EH_FILTER:
1873 r = gimple_call_arg (copy, 0);
1874 r = remap_eh_region_tree_nr (r, id);
1875 gimple_call_set_arg (copy, 0, r);
1876 break;
1877
1878 default:
1879 break;
1880 }
1881
1882 /* Reset the alias info if we didn't take measures to keep it
1883 valid across inlining by setting DECL_PT_UID. */
1884 if (!id->src_cfun->gimple_df
1885 || !id->src_cfun->gimple_df->ipa_pta)
1886 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1887 }
1888 break;
1889
1890 case GIMPLE_RESX:
1891 {
1892 gresx *resx_stmt = as_a <gresx *> (copy);
1893 int r = gimple_resx_region (resx_stmt);
1894 r = remap_eh_region_nr (r, id);
1895 gimple_resx_set_region (resx_stmt, r);
1896 }
1897 break;
1898
1899 case GIMPLE_EH_DISPATCH:
1900 {
1901 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1902 int r = gimple_eh_dispatch_region (eh_dispatch);
1903 r = remap_eh_region_nr (r, id);
1904 gimple_eh_dispatch_set_region (eh_dispatch, r);
1905 }
1906 break;
1907
1908 default:
1909 break;
1910 }
1911 }
1912
1913 /* If STMT has a block defined, map it to the newly constructed block. */
1914 if (tree block = gimple_block (copy))
1915 {
1916 tree *n;
1917 n = id->decl_map->get (block);
1918 gcc_assert (n);
1919 gimple_set_block (copy, *n);
1920 }
1921 if (id->param_body_adjs)
1922 {
1923 gimple_seq extra_stmts = NULL;
1924 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1925 if (!gimple_seq_empty_p (extra_stmts))
1926 {
1927 memset (&wi, 0, sizeof (wi));
1928 wi.info = id;
1929 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1930 !gsi_end_p (egsi);
1931 gsi_next (&egsi))
1932 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1933 gimple_seq_add_seq (&stmts, extra_stmts);
1934 }
1935 }
1936
1937 if (id->reset_location)
1938 gimple_set_location (copy, input_location);
1939
1940 /* Debug statements ought to be rebuilt and not copied. */
1941 gcc_checking_assert (!is_gimple_debug (copy));
1942
1943 /* Remap all the operands in COPY. */
1944 memset (&wi, 0, sizeof (wi));
1945 wi.info = id;
1946 if (skip_first)
1947 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1948 else
1949 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1950
1951 /* Clear the copied virtual operands. We are not remapping them here
1952 but are going to recreate them from scratch. */
1953 if (gimple_has_mem_ops (copy))
1954 {
1955 gimple_set_vdef (copy, NULL_TREE);
1956 gimple_set_vuse (copy, NULL_TREE);
1957 }
1958
1959 gimple_seq_add_stmt (&stmts, copy);
1960 return stmts;
1961 }
1962
1963
1964 /* Copy a basic block, scaling its profile accordingly. Edges will be
1965 taken care of later. */
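/* For instance (illustrative numbers only): with a call-site count NUM
   of 50 and a callee entry count DEN of 100, a callee block executed 80
   times is copied with count 80 * 50 / 100 == 40 by the apply_scale
   call below. */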
1966
1967 static basic_block
1968 copy_bb (copy_body_data *id, basic_block bb,
1969 profile_count num, profile_count den)
1970 {
1971 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1972 basic_block copy_basic_block;
1973 tree decl;
1974 basic_block prev;
1975
1976 profile_count::adjust_for_ipa_scaling (&num, &den);
1977
1978 /* Search for previous copied basic block. */
1979 prev = bb->prev_bb;
1980 while (!prev->aux)
1981 prev = prev->prev_bb;
1982
1983 /* create_basic_block() will append every new block to
1984 basic_block_info automatically. */
1985 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1986 copy_basic_block->count = bb->count.apply_scale (num, den);
1987
1988 copy_gsi = gsi_start_bb (copy_basic_block);
1989
1990 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1991 {
1992 gimple_seq stmts;
1993 gimple *stmt = gsi_stmt (gsi);
1994 gimple *orig_stmt = stmt;
1995 gimple_stmt_iterator stmts_gsi;
1996 bool stmt_added = false;
1997
1998 id->regimplify = false;
1999 stmts = remap_gimple_stmt (stmt, id);
2000
2001 if (gimple_seq_empty_p (stmts))
2002 continue;
2003
2004 seq_gsi = copy_gsi;
2005
2006 for (stmts_gsi = gsi_start (stmts);
2007 !gsi_end_p (stmts_gsi); )
2008 {
2009 stmt = gsi_stmt (stmts_gsi);
2010
2011 /* Advance iterator now before stmt is moved to seq_gsi. */
2012 gsi_next (&stmts_gsi);
2013
2014 if (gimple_nop_p (stmt))
2015 continue;
2016
2017 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2018 orig_stmt);
2019
2020 /* With return slot optimization we can end up with a
2021 non-gimple RHS like (foo *)&this->m; fix that here. */
2022 if (is_gimple_assign (stmt)
2023 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2024 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2025 {
2026 tree new_rhs;
2027 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2028 gimple_assign_rhs1 (stmt),
2029 true, NULL, false,
2030 GSI_CONTINUE_LINKING);
2031 gimple_assign_set_rhs1 (stmt, new_rhs);
2032 id->regimplify = false;
2033 }
2034
2035 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2036
2037 if (id->regimplify)
2038 gimple_regimplify_operands (stmt, &seq_gsi);
2039
2040 stmt_added = true;
2041 }
2042
2043 if (!stmt_added)
2044 continue;
2045
2046 /* If copy_basic_block was empty at the start of this iteration,
2047 call gsi_start_bb again to get at the newly added statements. */
2048 if (gsi_end_p (copy_gsi))
2049 copy_gsi = gsi_start_bb (copy_basic_block);
2050 else
2051 gsi_next (&copy_gsi);
2052
2053 /* Process the new statement. The call to gimple_regimplify_operands
2054 possibly turned the statement into multiple statements; we
2055 need to process all of them. */
2056 do
2057 {
2058 tree fn;
2059 gcall *call_stmt;
2060
2061 stmt = gsi_stmt (copy_gsi);
2062 call_stmt = dyn_cast <gcall *> (stmt);
2063 if (call_stmt
2064 && gimple_call_va_arg_pack_p (call_stmt)
2065 && id->call_stmt
2066 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2067 {
2068 /* __builtin_va_arg_pack () should be replaced by
2069 all arguments corresponding to ... in the caller. */
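/* For example (an illustrative sketch; myprintf is hypothetical):
   inlining the call
     myprintf ("%d %d", 1, 2)
   into a body that contains
     printf (fmt, __builtin_va_arg_pack ());
   rewrites the copied call into
     printf (fmt, 1, 2);  */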
2070 tree p;
2071 gcall *new_call;
2072 vec<tree> argarray;
2073 size_t nargs = gimple_call_num_args (id->call_stmt);
2074 size_t n;
2075
2076 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2077 nargs--;
2078
2079 /* Create the new array of arguments. */
2080 n = nargs + gimple_call_num_args (call_stmt);
2081 argarray.create (n);
2082 argarray.safe_grow_cleared (n);
2083
2084 /* Copy all the arguments before '...' */
2085 memcpy (argarray.address (),
2086 gimple_call_arg_ptr (call_stmt, 0),
2087 gimple_call_num_args (call_stmt) * sizeof (tree));
2088
2089 /* Append the arguments passed in '...' */
2090 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2091 gimple_call_arg_ptr (id->call_stmt, 0)
2092 + (gimple_call_num_args (id->call_stmt) - nargs),
2093 nargs * sizeof (tree));
2094
2095 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2096 argarray);
2097
2098 argarray.release ();
2099
2100 /* Copy all GIMPLE_CALL flags, location and block, except
2101 GF_CALL_VA_ARG_PACK. */
2102 gimple_call_copy_flags (new_call, call_stmt);
2103 gimple_call_set_va_arg_pack (new_call, false);
2104 /* location includes block. */
2105 gimple_set_location (new_call, gimple_location (stmt));
2106 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2107
2108 gsi_replace (&copy_gsi, new_call, false);
2109 stmt = new_call;
2110 }
2111 else if (call_stmt
2112 && id->call_stmt
2113 && (decl = gimple_call_fndecl (stmt))
2114 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2115 {
2116 /* __builtin_va_arg_pack_len () should be replaced by
2117 the number of anonymous arguments. */
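/* Continuing the hypothetical myprintf example above: the call
   myprintf ("%d %d", 1, 2) has three arguments and one named
   parameter, so nargs == 2 and __builtin_va_arg_pack_len ()
   folds to 2 here. */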
2118 size_t nargs = gimple_call_num_args (id->call_stmt);
2119 tree count, p;
2120 gimple *new_stmt;
2121
2122 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2123 nargs--;
2124
2125 if (!gimple_call_lhs (stmt))
2126 {
2127 /* Drop unused calls. */
2128 gsi_remove (&copy_gsi, false);
2129 continue;
2130 }
2131 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2132 {
2133 count = build_int_cst (integer_type_node, nargs);
2134 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2135 gsi_replace (&copy_gsi, new_stmt, false);
2136 stmt = new_stmt;
2137 }
2138 else if (nargs != 0)
2139 {
2140 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2141 count = build_int_cst (integer_type_node, nargs);
2142 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2143 PLUS_EXPR, newlhs, count);
2144 gimple_call_set_lhs (stmt, newlhs);
2145 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2146 }
2147 }
2148 else if (call_stmt
2149 && id->call_stmt
2150 && gimple_call_internal_p (stmt)
2151 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2152 {
2153 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2154 gsi_remove (&copy_gsi, false);
2155 continue;
2156 }
2157
2158 /* Statements produced by inlining can be unfolded, especially
2159 when we constant propagated some operands. We can't fold
2160 them right now for two reasons:
2161 1) folding requires SSA_NAME_DEF_STMTs to be correct
2162 2) we can't change function calls to builtins.
2163 So we just mark the statement for later folding. We mark
2164 all new statements, instead of just the statements that have changed
2165 by some nontrivial substitution, so that even statements made
2166 foldable indirectly are updated. If this turns out to be
2167 expensive, copy_body can be told to watch for nontrivial
2168 changes. */
2169 if (id->statements_to_fold)
2170 id->statements_to_fold->add (stmt);
2171
2172 /* We're duplicating a CALL_EXPR. Find any corresponding
2173 callgraph edges and update or duplicate them. */
2174 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2175 {
2176 struct cgraph_edge *edge;
2177
2178 switch (id->transform_call_graph_edges)
2179 {
2180 case CB_CGE_DUPLICATE:
2181 edge = id->src_node->get_edge (orig_stmt);
2182 if (edge)
2183 {
2184 struct cgraph_edge *old_edge = edge;
2185
2186 /* A speculative call consists of multiple
2187 edges - an indirect edge and one or more direct edges.
2188 Duplicate the whole thing and distribute the frequencies
2189 accordingly. */
2190 if (edge->speculative)
2191 {
2192 int n = 0;
2193 profile_count direct_cnt
2194 = profile_count::zero ();
2195
2196 /* First figure out the distribution of counts
2197 so we can re-scale the BB profile accordingly. */
2198 for (cgraph_edge *e = old_edge; e;
2199 e = e->next_speculative_call_target ())
2200 direct_cnt = direct_cnt + e->count;
2201
2202 cgraph_edge *indirect
2203 = old_edge->speculative_call_indirect_edge ();
2204 profile_count indir_cnt = indirect->count;
2205
2206 /* Next iterate over all direct edges, cloning each one and its
2207 corresponding reference, and update the profile. */
2208 for (cgraph_edge *e = old_edge;
2209 e;
2210 e = e->next_speculative_call_target ())
2211 {
2212 profile_count cnt = e->count;
2213
2214 id->dst_node->clone_reference
2215 (e->speculative_call_target_ref (), stmt);
2216 edge = e->clone (id->dst_node, call_stmt,
2217 gimple_uid (stmt), num, den,
2218 true);
2219 profile_probability prob
2220 = cnt.probability_in (direct_cnt
2221 + indir_cnt);
2222 edge->count
2223 = copy_basic_block->count.apply_probability
2224 (prob);
2225 n++;
2226 }
2227 gcc_checking_assert
2228 (indirect->num_speculative_call_targets_p ()
2229 == n);
2230
2231 /* Duplicate the indirect edge after all direct edges have
2232 been cloned. */
2233 indirect = indirect->clone (id->dst_node, call_stmt,
2234 gimple_uid (stmt),
2235 num, den,
2236 true);
2237
2238 profile_probability prob
2239 = indir_cnt.probability_in (direct_cnt
2240 + indir_cnt);
2241 indirect->count
2242 = copy_basic_block->count.apply_probability (prob);
2243 }
2244 else
2245 {
2246 edge = edge->clone (id->dst_node, call_stmt,
2247 gimple_uid (stmt),
2248 num, den,
2249 true);
2250 edge->count = copy_basic_block->count;
2251 }
2252 }
2253 break;
2254
2255 case CB_CGE_MOVE_CLONES:
2256 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2257 call_stmt);
2258 edge = id->dst_node->get_edge (stmt);
2259 break;
2260
2261 case CB_CGE_MOVE:
2262 edge = id->dst_node->get_edge (orig_stmt);
2263 if (edge)
2264 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2265 break;
2266
2267 default:
2268 gcc_unreachable ();
2269 }
2270
2271 /* Constant propagation on arguments done during inlining
2272 may create a new direct call. Produce an edge for it. */
2273 if ((!edge
2274 || (edge->indirect_inlining_edge
2275 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2276 && id->dst_node->definition
2277 && (fn = gimple_call_fndecl (stmt)) != NULL)
2278 {
2279 struct cgraph_node *dest = cgraph_node::get_create (fn);
2280
2281 /* We have a missing edge in the callgraph. This can happen
2282 when previous inlining turned an indirect call into a
2283 direct call by constant propagating arguments or when we are
2284 producing a dead clone (for further cloning). In all
2285 other cases we hit a bug (incorrect node sharing is the
2286 most common reason for missing edges). */
2287 gcc_assert (!dest->definition
2288 || dest->address_taken
2289 || !id->src_node->definition
2290 || !id->dst_node->definition);
2291 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2292 id->dst_node->create_edge_including_clones
2293 (dest, orig_stmt, call_stmt, bb->count,
2294 CIF_ORIGINALLY_INDIRECT_CALL);
2295 else
2296 id->dst_node->create_edge (dest, call_stmt,
2297 bb->count)->inline_failed
2298 = CIF_ORIGINALLY_INDIRECT_CALL;
2299 if (dump_file)
2300 {
2301 fprintf (dump_file, "Created new direct edge to %s\n",
2302 dest->dump_name ());
2303 }
2304 }
2305
2306 notice_special_calls (as_a <gcall *> (stmt));
2307 }
2308
2309 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2310 id->eh_map, id->eh_lp_nr);
2311
2312 gsi_next (&copy_gsi);
2313 }
2314 while (!gsi_end_p (copy_gsi));
2315
2316 copy_gsi = gsi_last_bb (copy_basic_block);
2317 }
2318
2319 return copy_basic_block;
2320 }
2321
2322 /* Inserting a Single Entry Multiple Exit region in SSA form into code in
2323 SSA form is quite easy, since the dominator relationship for the old
2324 basic blocks does not change.
2325
2326 There is however an exception: inlining might change the dominator
2327 relation across EH edges going from basic blocks within the inlined
2328 function to landing pads in the function we inline into.
2329
2330 This function fills in the PHI_RESULTs of such PHI nodes if they refer
2331 to gimple regs. Otherwise, it marks the PHI_RESULT of such
2332 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2333 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2334 set, and this means that there will be no overlapping live ranges
2335 for the underlying symbol.
2336
2337 This might change in the future if we allow redirecting of EH edges;
2338 we might then want to change the way we build the CFG pre-inlining to
2339 include all the possible edges. */
2340 static void
2341 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2342 bool can_throw, bool nonlocal_goto)
2343 {
2344 edge e;
2345 edge_iterator ei;
2346
2347 FOR_EACH_EDGE (e, ei, bb->succs)
2348 if (!e->dest->aux
2349 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2350 {
2351 gphi *phi;
2352 gphi_iterator si;
2353
2354 if (!nonlocal_goto)
2355 gcc_assert (e->flags & EDGE_EH);
2356
2357 if (!can_throw)
2358 gcc_assert (!(e->flags & EDGE_EH));
2359
2360 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2361 {
2362 edge re;
2363
2364 phi = si.phi ();
2365
2366 /* For abnormal goto/call edges the receiver can be the
2367 ENTRY_BLOCK. Do not assert this cannot happen. */
2368
2369 gcc_assert ((e->flags & EDGE_EH)
2370 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2371
2372 re = find_edge (ret_bb, e->dest);
2373 gcc_checking_assert (re);
2374 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2375 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2376
2377 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2378 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2379 }
2380 }
2381 }
2382
2383 /* Insert clobbers for the automatic variables of the inlined function
2384 ID->src_fn at the start of basic block ID->eh_landing_pad_dest. */
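/* A sketch of the intent: once an exception escapes the inlined body,
   its automatic variables are dead, so emitting VAR = {CLOBBER} at the
   landing pad lets later passes reuse their stack slots. */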
2385
2386 static void
2387 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2388 {
2389 tree var;
2390 basic_block bb = id->eh_landing_pad_dest;
2391 live_vars_map *vars = NULL;
2392 unsigned int cnt = 0;
2393 unsigned int i;
2394 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2395 if (VAR_P (var)
2396 && !DECL_HARD_REGISTER (var)
2397 && !TREE_THIS_VOLATILE (var)
2398 && !DECL_HAS_VALUE_EXPR_P (var)
2399 && !is_gimple_reg (var)
2400 && auto_var_in_fn_p (var, id->src_fn)
2401 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2402 {
2403 tree *t = id->decl_map->get (var);
2404 if (!t)
2405 continue;
2406 tree new_var = *t;
2407 if (VAR_P (new_var)
2408 && !DECL_HARD_REGISTER (new_var)
2409 && !TREE_THIS_VOLATILE (new_var)
2410 && !DECL_HAS_VALUE_EXPR_P (new_var)
2411 && !is_gimple_reg (new_var)
2412 && auto_var_in_fn_p (new_var, id->dst_fn))
2413 {
2414 if (vars == NULL)
2415 vars = new live_vars_map;
2416 vars->put (DECL_UID (var), cnt++);
2417 }
2418 }
2419 if (vars == NULL)
2420 return;
2421
2422 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2423 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2424 if (VAR_P (var))
2425 {
2426 edge e;
2427 edge_iterator ei;
2428 bool needed = false;
2429 unsigned int *v = vars->get (DECL_UID (var));
2430 if (v == NULL)
2431 continue;
2432 FOR_EACH_EDGE (e, ei, bb->preds)
2433 if ((e->flags & EDGE_EH) != 0
2434 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2435 {
2436 basic_block src_bb = (basic_block) e->src->aux;
2437
2438 if (bitmap_bit_p (&live[src_bb->index], *v))
2439 {
2440 needed = true;
2441 break;
2442 }
2443 }
2444 if (needed)
2445 {
2446 tree new_var = *id->decl_map->get (var);
2447 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2448 tree clobber = build_clobber (TREE_TYPE (new_var));
2449 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2450 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2451 }
2452 }
2453 destroy_live_vars (live);
2454 delete vars;
2455 }
2456
2457 /* Copy edges from BB into its copy constructed earlier, scaling the
2458 profile accordingly. Assume the aux pointers point to the copies
2459 of each BB. Return true if any debug stmts are left after a
2460 statement that must end the basic block. */
2461
2462 static bool
2463 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2464 basic_block ret_bb, basic_block abnormal_goto_dest,
2465 copy_body_data *id)
2466 {
2467 basic_block new_bb = (basic_block) bb->aux;
2468 edge_iterator ei;
2469 edge old_edge;
2470 gimple_stmt_iterator si;
2471 bool need_debug_cleanup = false;
2472
2473 /* Use the indices from the original blocks to create edges for the
2474 new ones. */
2475 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2476 if (!(old_edge->flags & EDGE_EH))
2477 {
2478 edge new_edge;
2479 int flags = old_edge->flags;
2480 location_t locus = old_edge->goto_locus;
2481
2482 /* Return edges do get a FALLTHRU flag when they get inlined. */
2483 if (old_edge->dest->index == EXIT_BLOCK
2484 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2485 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2486 flags |= EDGE_FALLTHRU;
2487
2488 new_edge
2489 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2490 new_edge->probability = old_edge->probability;
2491 if (!id->reset_location)
2492 new_edge->goto_locus = remap_location (locus, id);
2493 }
2494
2495 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2496 return false;
2497
2498 /* When doing function splitting, we must decrease the count of the return
2499 block that was previously reachable from blocks we did not copy. */
2500 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2501 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2502 if (old_edge->src->index != ENTRY_BLOCK
2503 && !old_edge->src->aux)
2504 new_bb->count -= old_edge->count ().apply_scale (num, den);
2505
2506 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2507 {
2508 gimple *copy_stmt;
2509 bool can_throw, nonlocal_goto;
2510
2511 copy_stmt = gsi_stmt (si);
2512 if (!is_gimple_debug (copy_stmt))
2513 update_stmt (copy_stmt);
2514
2515 /* Do this before the possible split_block. */
2516 gsi_next (&si);
2517
2518 /* If this tree could throw an exception, there are two
2519 cases where we need to add abnormal edge(s): the
2520 tree wasn't in a region and there is a "current
2521 region" in the caller; or the original tree had
2522 EH edges. In both cases split the block after the tree,
2523 and add abnormal edge(s) as needed; we need both
2524 those from the callee and the caller.
2525 We check whether the copy can throw, because the const
2526 propagation can change an INDIRECT_REF which throws
2527 into a COMPONENT_REF which doesn't. If the copy
2528 can throw, the original could also throw. */
2529 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2530 nonlocal_goto
2531 = (stmt_can_make_abnormal_goto (copy_stmt)
2532 && !computed_goto_p (copy_stmt));
2533
2534 if (can_throw || nonlocal_goto)
2535 {
2536 if (!gsi_end_p (si))
2537 {
2538 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2539 gsi_next (&si);
2540 if (gsi_end_p (si))
2541 need_debug_cleanup = true;
2542 }
2543 if (!gsi_end_p (si))
2544 /* Note that bb's predecessor edges aren't necessarily
2545 right at this point; split_block doesn't care. */
2546 {
2547 edge e = split_block (new_bb, copy_stmt);
2548
2549 new_bb = e->dest;
2550 new_bb->aux = e->src->aux;
2551 si = gsi_start_bb (new_bb);
2552 }
2553 }
2554
2555 bool update_probs = false;
2556
2557 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2558 {
2559 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2560 update_probs = true;
2561 }
2562 else if (can_throw)
2563 {
2564 make_eh_edges (copy_stmt);
2565 update_probs = true;
2566 }
2567
2568 /* EH edges may not match old edges. Copy as much as possible. */
2569 if (update_probs)
2570 {
2571 edge e;
2572 edge_iterator ei;
2573 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2574
2575 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2576 if ((old_edge->flags & EDGE_EH)
2577 && (e = find_edge (copy_stmt_bb,
2578 (basic_block) old_edge->dest->aux))
2579 && (e->flags & EDGE_EH))
2580 e->probability = old_edge->probability;
2581
2582 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2583 if (e->flags & EDGE_EH)
2584 {
2585 if (!e->probability.initialized_p ())
2586 e->probability = profile_probability::never ();
2587 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2588 {
2589 if (id->eh_landing_pad_dest == NULL)
2590 id->eh_landing_pad_dest = e->dest;
2591 else
2592 gcc_assert (id->eh_landing_pad_dest == e->dest);
2593 }
2594 }
2595 }
2596
2597
2598 /* If the call we inline cannot make an abnormal goto, do not add
2599 additional abnormal edges but only retain those already present
2600 in the original function body. */
2601 if (abnormal_goto_dest == NULL)
2602 nonlocal_goto = false;
2603 if (nonlocal_goto)
2604 {
2605 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2606
2607 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2608 nonlocal_goto = false;
2609 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2610 in OpenMP regions which aren't allowed to be left abnormally.
2611 So there is no need to add an abnormal edge in that case. */
2612 else if (is_gimple_call (copy_stmt)
2613 && gimple_call_internal_p (copy_stmt)
2614 && (gimple_call_internal_fn (copy_stmt)
2615 == IFN_ABNORMAL_DISPATCHER)
2616 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2617 nonlocal_goto = false;
2618 else
2619 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2620 EDGE_ABNORMAL);
2621 }
2622
2623 if ((can_throw || nonlocal_goto)
2624 && gimple_in_ssa_p (cfun))
2625 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2626 can_throw, nonlocal_goto);
2627 }
2628 return need_debug_cleanup;
2629 }
2630
2631 /* Copy the PHIs. All blocks and edges have been copied; some blocks
2632 were possibly split and new outgoing EH edges inserted.
2633 BB points to the block of the original function and AUX pointers link
2634 the original and newly copied blocks. */
2635
2636 static void
2637 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2638 {
2639 basic_block const new_bb = (basic_block) bb->aux;
2640 edge_iterator ei;
2641 gphi *phi;
2642 gphi_iterator si;
2643 edge new_edge;
2644 bool inserted = false;
2645
2646 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2647 {
2648 tree res, new_res;
2649 gphi *new_phi;
2650
2651 phi = si.phi ();
2652 res = PHI_RESULT (phi);
2653 new_res = res;
2654 if (!virtual_operand_p (res))
2655 {
2656 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2657 if (EDGE_COUNT (new_bb->preds) == 0)
2658 {
2659 /* Technically we'd want an SSA_DEFAULT_DEF here... */
2660 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2661 }
2662 else
2663 {
2664 new_phi = create_phi_node (new_res, new_bb);
2665 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2666 {
2667 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2668 bb);
2669 tree arg;
2670 tree new_arg;
2671 edge_iterator ei2;
2672 location_t locus;
2673
2674 /* When doing partial cloning, we allow PHIs on the entry
2675 block as long as all the arguments are the same.
2676 Find any input edge to see the argument to copy. */
2677 if (!old_edge)
2678 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2679 if (!old_edge->src->aux)
2680 break;
2681
2682 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2683 new_arg = arg;
2684 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2685 gcc_assert (new_arg);
2686 /* With return slot optimization we can end up with a
2687 non-gimple RHS like (foo *)&this->m; fix that here. */
2688 if (TREE_CODE (new_arg) != SSA_NAME
2689 && TREE_CODE (new_arg) != FUNCTION_DECL
2690 && !is_gimple_val (new_arg))
2691 {
2692 gimple_seq stmts = NULL;
2693 new_arg = force_gimple_operand (new_arg, &stmts, true,
2694 NULL);
2695 gsi_insert_seq_on_edge (new_edge, stmts);
2696 inserted = true;
2697 }
2698 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2699 if (id->reset_location)
2700 locus = input_location;
2701 else
2702 locus = remap_location (locus, id);
2703 add_phi_arg (new_phi, new_arg, new_edge, locus);
2704 }
2705 }
2706 }
2707 }
2708
2709 /* Commit the delayed edge insertions. */
2710 if (inserted)
2711 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2712 gsi_commit_one_edge_insert (new_edge, NULL);
2713 }
2714
2715
2716 /* Wrapper for remap_decl so it can be used as a callback. */
2717
2718 static tree
2719 remap_decl_1 (tree decl, void *data)
2720 {
2721 return remap_decl (decl, (copy_body_data *) data);
2722 }
2723
2724 /* Build the struct function and associated data structures for the new
2725 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. This function
2726 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2727
2728 static void
2729 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2730 {
2731 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2732
2733 if (!DECL_ARGUMENTS (new_fndecl))
2734 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2735 if (!DECL_RESULT (new_fndecl))
2736 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2737
2738 /* Register specific tree functions. */
2739 gimple_register_cfg_hooks ();
2740
2741 /* Get clean struct function. */
2742 push_struct_function (new_fndecl);
2743
2744 /* We will rebuild these, so just sanity check that they are empty. */
2745 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2746 gcc_assert (cfun->local_decls == NULL);
2747 gcc_assert (cfun->cfg == NULL);
2748 gcc_assert (cfun->decl == new_fndecl);
2749
2750 /* Copy items we preserve during cloning. */
2751 cfun->static_chain_decl = src_cfun->static_chain_decl;
2752 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2753 cfun->function_end_locus = src_cfun->function_end_locus;
2754 cfun->curr_properties = src_cfun->curr_properties;
2755 cfun->last_verified = src_cfun->last_verified;
2756 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2757 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2758 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2759 cfun->calls_eh_return = src_cfun->calls_eh_return;
2760 cfun->stdarg = src_cfun->stdarg;
2761 cfun->after_inlining = src_cfun->after_inlining;
2762 cfun->can_throw_non_call_exceptions
2763 = src_cfun->can_throw_non_call_exceptions;
2764 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2765 cfun->returns_struct = src_cfun->returns_struct;
2766 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2767
2768 init_empty_tree_cfg ();
2769
2770 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2771
2772 profile_count num = count;
2773 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2774 profile_count::adjust_for_ipa_scaling (&num, &den);
2775
2776 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2777 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2778 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2779 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2780 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2781 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2782 if (src_cfun->eh)
2783 init_eh_for_function ();
2784
2785 if (src_cfun->gimple_df)
2786 {
2787 init_tree_ssa (cfun);
2788 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2789 if (cfun->gimple_df->in_ssa_p)
2790 init_ssa_operands (cfun);
2791 }
2792 }
2793
2794 /* Helper function for copy_cfg_body. Move debug stmts from the end
2795 of NEW_BB to the beginning of successor basic blocks when needed. If the
2796 successor has multiple predecessors, reset their values; otherwise
2797 keep them. */
2798
2799 static void
2800 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2801 {
2802 edge e;
2803 edge_iterator ei;
2804 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2805
2806 if (gsi_end_p (si)
2807 || gsi_one_before_end_p (si)
2808 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2809 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2810 return;
2811
2812 FOR_EACH_EDGE (e, ei, new_bb->succs)
2813 {
2814 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2815 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2816 while (is_gimple_debug (gsi_stmt (ssi)))
2817 {
2818 gimple *stmt = gsi_stmt (ssi);
2819 gdebug *new_stmt;
2820 tree var;
2821 tree value;
2822
2823 /* For the last edge move the debug stmts instead of copying
2824 them. */
2825 if (ei_one_before_end_p (ei))
2826 {
2827 si = ssi;
2828 gsi_prev (&ssi);
2829 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2830 {
2831 gimple_debug_bind_reset_value (stmt);
2832 gimple_set_location (stmt, UNKNOWN_LOCATION);
2833 }
2834 gsi_remove (&si, false);
2835 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2836 continue;
2837 }
2838
2839 if (gimple_debug_bind_p (stmt))
2840 {
2841 var = gimple_debug_bind_get_var (stmt);
2842 if (single_pred_p (e->dest))
2843 {
2844 value = gimple_debug_bind_get_value (stmt);
2845 value = unshare_expr (value);
2846 new_stmt = gimple_build_debug_bind (var, value, stmt);
2847 }
2848 else
2849 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2850 }
2851 else if (gimple_debug_source_bind_p (stmt))
2852 {
2853 var = gimple_debug_source_bind_get_var (stmt);
2854 value = gimple_debug_source_bind_get_value (stmt);
2855 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2856 }
2857 else if (gimple_debug_nonbind_marker_p (stmt))
2858 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2859 else
2860 gcc_unreachable ();
2861 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2862 id->debug_stmts.safe_push (new_stmt);
2863 gsi_prev (&ssi);
2864 }
2865 }
2866 }
2867
2868 /* Make a copy of the sub-loops of SRC_PARENT and place them
2869 as sub-loops of DEST_PARENT. */
2870
2871 static void
2872 copy_loops (copy_body_data *id,
2873 class loop *dest_parent, class loop *src_parent)
2874 {
2875 class loop *src_loop = src_parent->inner;
2876 while (src_loop)
2877 {
2878 if (!id->blocks_to_copy
2879 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2880 {
2881 class loop *dest_loop = alloc_loop ();
2882
2883 /* Assign the new loop its header and latch and associate
2884 those with the new loop. */
2885 dest_loop->header = (basic_block)src_loop->header->aux;
2886 dest_loop->header->loop_father = dest_loop;
2887 if (src_loop->latch != NULL)
2888 {
2889 dest_loop->latch = (basic_block)src_loop->latch->aux;
2890 dest_loop->latch->loop_father = dest_loop;
2891 }
2892
2893 /* Copy loop meta-data. */
2894 copy_loop_info (src_loop, dest_loop);
2895 if (dest_loop->unroll)
2896 cfun->has_unroll = true;
2897 if (dest_loop->force_vectorize)
2898 cfun->has_force_vectorize_loops = true;
2899 if (id->src_cfun->last_clique != 0)
2900 dest_loop->owned_clique
2901 = remap_dependence_clique (id,
2902 src_loop->owned_clique
2903 ? src_loop->owned_clique : 1);
2904
2905 /* Finally place it into the loop array and the loop tree. */
2906 place_new_loop (cfun, dest_loop);
2907 flow_loop_tree_node_add (dest_parent, dest_loop);
2908
2909 if (src_loop->simduid)
2910 {
2911 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2912 cfun->has_simduid_loops = true;
2913 }
2914
2915 /* Recurse. */
2916 copy_loops (id, dest_loop, src_loop);
2917 }
2918 src_loop = src_loop->next;
2919 }
2920 }
2921
2922 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2923
2924 void
2925 redirect_all_calls (copy_body_data * id, basic_block bb)
2926 {
2927 gimple_stmt_iterator si;
2928 gimple *last = last_stmt (bb);
2929 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2930 {
2931 gimple *stmt = gsi_stmt (si);
2932 if (is_gimple_call (stmt))
2933 {
2934 tree old_lhs = gimple_call_lhs (stmt);
2935 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2936 if (edge)
2937 {
2938 gimple *new_stmt
2939 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2940 /* If the IPA-SRA transformation, run as part of edge redirection,
2941 removed the LHS because it is unused, save it to
2942 killed_new_ssa_names so that we can prune it from debug
2943 statements. */
2944 if (old_lhs
2945 && TREE_CODE (old_lhs) == SSA_NAME
2946 && !gimple_call_lhs (new_stmt))
2947 {
2948 if (!id->killed_new_ssa_names)
2949 id->killed_new_ssa_names = new hash_set<tree> (16);
2950 id->killed_new_ssa_names->add (old_lhs);
2951 }
2952
2953 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2954 gimple_purge_dead_eh_edges (bb);
2955 }
2956 }
2957 }
2958 }
2959
2960 /* Make a copy of the body of FN so that it can be inserted inline in
2961 another function. Walks FN via its CFG and returns the new fndecl. */
2962
2963 static tree
2964 copy_cfg_body (copy_body_data * id,
2965 basic_block entry_block_map, basic_block exit_block_map,
2966 basic_block new_entry)
2967 {
2968 tree callee_fndecl = id->src_fn;
2969 /* Original cfun for the callee, doesn't change. */
2970 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2971 struct function *cfun_to_copy;
2972 basic_block bb;
2973 tree new_fndecl = NULL;
2974 bool need_debug_cleanup = false;
2975 int last;
2976 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2977 profile_count num = entry_block_map->count;
2978
2979 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2980
2981 /* Register specific tree functions. */
2982 gimple_register_cfg_hooks ();
2983
2984 /* If we are inlining just a region of the function, make sure to connect
2985 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can
2986 be part of a loop, we must compute the frequency and probability of
2987 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2988 probabilities of the edges incoming from the nonduplicated region. */
2989 if (new_entry)
2990 {
2991 edge e;
2992 edge_iterator ei;
2993 den = profile_count::zero ();
2994
2995 FOR_EACH_EDGE (e, ei, new_entry->preds)
2996 if (!e->src->aux)
2997 den += e->count ();
2998 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2999 }
3000
3001 profile_count::adjust_for_ipa_scaling (&num, &den);
3002
3003 /* We must have a CFG at this point. */
3004 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3005 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3006
3007
3008 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3009 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3010 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3011 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3012
3013 /* Duplicate any exception-handling regions. */
3014 if (cfun->eh)
3015 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3016 remap_decl_1, id);
3017
3018 /* Use aux pointers to map the original blocks to their copies. */
3019 FOR_EACH_BB_FN (bb, cfun_to_copy)
3020 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3021 {
3022 basic_block new_bb = copy_bb (id, bb, num, den);
3023 bb->aux = new_bb;
3024 new_bb->aux = bb;
3025 new_bb->loop_father = entry_block_map->loop_father;
3026 }
3027
3028 last = last_basic_block_for_fn (cfun);
3029
3030 /* Now that we've duplicated the blocks, duplicate their edges. */
3031 basic_block abnormal_goto_dest = NULL;
3032 if (id->call_stmt
3033 && stmt_can_make_abnormal_goto (id->call_stmt))
3034 {
3035 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3036
3037 bb = gimple_bb (id->call_stmt);
3038 gsi_next (&gsi);
3039 if (gsi_end_p (gsi))
3040 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3041 }
3042 FOR_ALL_BB_FN (bb, cfun_to_copy)
3043 if (!id->blocks_to_copy
3044 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3045 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3046 abnormal_goto_dest, id);
3047
3048 if (id->eh_landing_pad_dest)
3049 {
3050 add_clobbers_to_eh_landing_pad (id);
3051 id->eh_landing_pad_dest = NULL;
3052 }
3053
3054 if (new_entry)
3055 {
3056 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3057 EDGE_FALLTHRU);
3058 e->probability = profile_probability::always ();
3059 }
3060
3061 /* Duplicate the loop tree, if available and wanted. */
3062 if (loops_for_fn (src_cfun) != NULL
3063 && current_loops != NULL)
3064 {
3065 copy_loops (id, entry_block_map->loop_father,
3066 get_loop (src_cfun, 0));
3067 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3068 loops_state_set (LOOPS_NEED_FIXUP);
3069 }
3070
3071 /* If the loop tree in the source function needed fixup, mark the
3072 destination loop tree for fixup, too. */
3073 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3074 loops_state_set (LOOPS_NEED_FIXUP);
3075
3076 if (gimple_in_ssa_p (cfun))
3077 FOR_ALL_BB_FN (bb, cfun_to_copy)
3078 if (!id->blocks_to_copy
3079 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3080 copy_phis_for_bb (bb, id);
3081
3082 FOR_ALL_BB_FN (bb, cfun_to_copy)
3083 if (bb->aux)
3084 {
3085 if (need_debug_cleanup
3086 && bb->index != ENTRY_BLOCK
3087 && bb->index != EXIT_BLOCK)
3088 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3089 /* Update call edge destinations. This cannot be done before loop
3090 info is updated, because we may split basic blocks. */
3091 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3092 && bb->index != ENTRY_BLOCK
3093 && bb->index != EXIT_BLOCK)
3094 redirect_all_calls (id, (basic_block)bb->aux);
3095 ((basic_block)bb->aux)->aux = NULL;
3096 bb->aux = NULL;
3097 }
3098
3099 /* Zero out the AUX fields of blocks newly created during EH edge
3100 insertion. */
3101 for (; last < last_basic_block_for_fn (cfun); last++)
3102 {
3103 if (need_debug_cleanup)
3104 maybe_move_debug_stmts_to_successors (id,
3105 BASIC_BLOCK_FOR_FN (cfun, last));
3106 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3107 /* Update call edge destinations. This cannot be done before loop
3108 info is updated, because we may split basic blocks. */
3109 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3110 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3111 }
3112 entry_block_map->aux = NULL;
3113 exit_block_map->aux = NULL;
3114
3115 if (id->eh_map)
3116 {
3117 delete id->eh_map;
3118 id->eh_map = NULL;
3119 }
3120 if (id->dependence_map)
3121 {
3122 delete id->dependence_map;
3123 id->dependence_map = NULL;
3124 }
3125
3126 return new_fndecl;
3127 }
3128
3129 /* Copy the debug STMT using ID. We deal with these statements in a
3130 special way: if any variable in their VALUE expression wasn't
3131 remapped yet, we won't remap it, because that would get decl uids
3132 out of sync, causing codegen differences between -g and -g0. If
3133 this arises, we drop the VALUE expression altogether. */
3134
3135 static void
3136 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3137 {
3138 tree t, *n;
3139 struct walk_stmt_info wi;
3140
3141 if (tree block = gimple_block (stmt))
3142 {
3143 n = id->decl_map->get (block);
3144 gimple_set_block (stmt, n ? *n : id->block);
3145 }
3146
3147 if (gimple_debug_nonbind_marker_p (stmt))
3148 return;
3149
3150 /* Remap all the operands in STMT. */
3151 memset (&wi, 0, sizeof (wi));
3152 wi.info = id;
3153
3154 processing_debug_stmt = 1;
3155
3156 if (gimple_debug_source_bind_p (stmt))
3157 t = gimple_debug_source_bind_get_var (stmt);
3158 else if (gimple_debug_bind_p (stmt))
3159 t = gimple_debug_bind_get_var (stmt);
3160 else
3161 gcc_unreachable ();
3162
3163 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3164 && (n = id->debug_map->get (t)))
3165 {
3166 gcc_assert (VAR_P (*n));
3167 t = *n;
3168 }
3169 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3170 /* T is a non-localized variable. */;
3171 else
3172 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3173
3174 if (gimple_debug_bind_p (stmt))
3175 {
3176 gimple_debug_bind_set_var (stmt, t);
3177
3178 if (gimple_debug_bind_has_value_p (stmt))
3179 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3180 remap_gimple_op_r, &wi, NULL);
3181
3182 /* Punt if any decl couldn't be remapped. */
3183 if (processing_debug_stmt < 0)
3184 gimple_debug_bind_reset_value (stmt);
3185 }
3186 else if (gimple_debug_source_bind_p (stmt))
3187 {
3188 gimple_debug_source_bind_set_var (stmt, t);
3189 /* When inlining, if the source bind refers to one of the optimized-away
3190 parameters, change the source bind into a normal debug bind
3191 referring to the corresponding DEBUG_EXPR_DECL that should have
3192 been bound before the call stmt. */
3193 t = gimple_debug_source_bind_get_value (stmt);
3194 if (t != NULL_TREE
3195 && TREE_CODE (t) == PARM_DECL
3196 && id->call_stmt)
3197 {
3198 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3199 unsigned int i;
3200 if (debug_args != NULL)
3201 {
3202 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3203 if ((**debug_args)[i] == DECL_ORIGIN (t)
3204 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3205 {
3206 t = (**debug_args)[i + 1];
3207 stmt->subcode = GIMPLE_DEBUG_BIND;
3208 gimple_debug_bind_set_value (stmt, t);
3209 break;
3210 }
3211 }
3212 }
3213 if (gimple_debug_source_bind_p (stmt))
3214 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3215 remap_gimple_op_r, &wi, NULL);
3216 }
3217
3218 processing_debug_stmt = 0;
3219
3220 update_stmt (stmt);
3221 }
3222
3223 /* Process deferred debug stmts. In order to give values better odds
3224 of being successfully remapped, we delay the processing of debug
3225 stmts until all other stmts that might require remapping are
3226 processed. */
3227
3228 static void
3229 copy_debug_stmts (copy_body_data *id)
3230 {
3231 size_t i;
3232 gdebug *stmt;
3233
3234 if (!id->debug_stmts.exists ())
3235 return;
3236
3237 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3238 copy_debug_stmt (stmt, id);
3239
3240 id->debug_stmts.release ();
3241 }
3242
3243 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3244 another function. */
3245
3246 static tree
3247 copy_tree_body (copy_body_data *id)
3248 {
3249 tree fndecl = id->src_fn;
3250 tree body = DECL_SAVED_TREE (fndecl);
3251
3252 walk_tree (&body, copy_tree_body_r, id, NULL);
3253
3254 return body;
3255 }
3256
3257 /* Make a copy of the body of FN so that it can be inserted inline in
3258 another function. */
3259
3260 static tree
3261 copy_body (copy_body_data *id,
3262 basic_block entry_block_map, basic_block exit_block_map,
3263 basic_block new_entry)
3264 {
3265 tree fndecl = id->src_fn;
3266 tree body;
3267
3268 /* If this body has a CFG, walk CFG and copy. */
3269 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3270 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3271 new_entry);
3272 copy_debug_stmts (id);
3273 delete id->killed_new_ssa_names;
3274 id->killed_new_ssa_names = NULL;
3275
3276 return body;
3277 }
3278
3279 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3280 defined in function FN, or of a data member thereof. */
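/* E.g. &local or &local.field, where 'local' is an automatic variable
   of FN (names are illustrative). */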
3281
3282 static bool
3283 self_inlining_addr_expr (tree value, tree fn)
3284 {
3285 tree var;
3286
3287 if (TREE_CODE (value) != ADDR_EXPR)
3288 return false;
3289
3290 var = get_base_address (TREE_OPERAND (value, 0));
3291
3292 return var && auto_var_in_fn_p (var, fn);
3293 }
3294
3295 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3296 the lexical block and line number information from BASE_STMT, if given,
3297 or from the last stmt of the block otherwise. */
3298
3299 static gimple *
3300 insert_init_debug_bind (copy_body_data *id,
3301 basic_block bb, tree var, tree value,
3302 gimple *base_stmt)
3303 {
3304 gimple *note;
3305 gimple_stmt_iterator gsi;
3306 tree tracked_var;
3307
3308 if (!gimple_in_ssa_p (id->src_cfun))
3309 return NULL;
3310
3311 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3312 return NULL;
3313
3314 tracked_var = target_for_debug_bind (var);
3315 if (!tracked_var)
3316 return NULL;
3317
3318 if (bb)
3319 {
3320 gsi = gsi_last_bb (bb);
3321 if (!base_stmt && !gsi_end_p (gsi))
3322 base_stmt = gsi_stmt (gsi);
3323 }
3324
3325 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3326
3327 if (bb)
3328 {
3329 if (!gsi_end_p (gsi))
3330 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3331 else
3332 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3333 }
3334
3335 return note;
3336 }
3337
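/* Insert INIT_STMT, an initialization generated for an inlined
   parameter, at the end of basic block BB, regimplifying it if
   necessary and adding a matching debug bind (a descriptive summary
   of the code below). */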
3338 static void
3339 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3340 {
3341 /* If VAR represents a zero-sized variable, it's possible that the
3342 assignment statement may result in no gimple statements. */
3343 if (init_stmt)
3344 {
3345 gimple_stmt_iterator si = gsi_last_bb (bb);
3346
3347 /* We can end up with init statements that store to a non-register
3348 from a rhs with a conversion. Handle that here by forcing the
3349 rhs into a temporary. gimple_regimplify_operands is not
3350 prepared to do this for us. */
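/* Illustrative example: given 'var = (sometype) x_1' where VAR is not a
   register, the code below forces '(sometype) x_1' into a fresh
   temporary t_2 and rewrites the statement to 'var = t_2' (names are
   made up). */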
3351 if (!is_gimple_debug (init_stmt)
3352 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3353 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3354 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3355 {
3356 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3357 gimple_expr_type (init_stmt),
3358 gimple_assign_rhs1 (init_stmt));
3359 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3360 GSI_NEW_STMT);
3361 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3362 gimple_assign_set_rhs1 (init_stmt, rhs);
3363 }
3364 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3365 if (!is_gimple_debug (init_stmt))
3366 {
3367 gimple_regimplify_operands (init_stmt, &si);
3368
3369 tree def = gimple_assign_lhs (init_stmt);
3370 insert_init_debug_bind (id, bb, def, def, init_stmt);
3371 }
3372 }
3373 }
3374
3375 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3376 if need be (which should only be necessary for invalid programs). Attempt
3377 to convert VALUE to TYPE and return the result if that is possible;
3378 otherwise just return a zero constant of the given type. */
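/* An illustrative sketch: passing an int where a long is expected is a
   simple promotion handled by fold_convert; a truly mismatched value
   whose type is not a register type, or whose size matches TYPE's, is
   reinterpreted via a VIEW_CONVERT_EXPR; anything else degrades to a
   zero constant of TYPE. */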
3379
3380 tree
3381 force_value_to_type (tree type, tree value)
3382 {
3383 /* If we can match up types by promotion/demotion do so. */
3384 if (fold_convertible_p (type, value))
3385 return fold_convert (type, value);
3386
3387 /* ??? For valid programs we should not end up here.
3388 Still if we end up with truly mismatched types here, fall back
3389 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3390 GIMPLE to the following passes. */
3391 if (!is_gimple_reg_type (TREE_TYPE (value))
3392 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3393 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3394 else
3395 return build_zero_cst (type);
3396 }
3397
3398 /* Initialize parameter P with VALUE. If needed, produce an init statement
3399 at the end of BB. When BB is NULL, we return the init statement to be
3400 output later. */
3401 static gimple *
3402 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3403 basic_block bb, tree *vars)
3404 {
3405 gimple *init_stmt = NULL;
3406 tree var;
3407 tree rhs = value;
3408 tree def = (gimple_in_ssa_p (cfun)
3409 ? ssa_default_def (id->src_cfun, p) : NULL);
3410
3411 if (value
3412 && value != error_mark_node
3413 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3414 rhs = force_value_to_type (TREE_TYPE (p), value);
3415
3416 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3417 here since the type of this decl must be visible to the calling
3418 function. */
3419 var = copy_decl_to_var (p, id);
3420
3421 /* Declare this new variable. */
3422 DECL_CHAIN (var) = *vars;
3423 *vars = var;
3424
3425 /* Make gimplifier happy about this variable. */
3426 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3427
3428 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3429 we would not need to create a new variable here at all, if it
3430 weren't for debug info. Still, we can just use the argument
3431 value. */
3432 if (TREE_READONLY (p)
3433 && !TREE_ADDRESSABLE (p)
3434 && value && !TREE_SIDE_EFFECTS (value)
3435 && !def)
3436 {
3437 /* We may produce non-gimple trees by adding NOPs or introduce
3438 invalid sharing when the operand is not really constant.
3439 It is no big deal to prohibit constant propagation here, as
3440 we will constant propagate in the DOM1 pass anyway. */
3441 if (is_gimple_min_invariant (value)
3442 && useless_type_conversion_p (TREE_TYPE (p),
3443 TREE_TYPE (value))
3444 /* We have to be very careful about ADDR_EXPR. Make sure
3445 the base variable isn't a local variable of the inlined
3446 function, e.g., when doing recursive inlining, direct or
3447 mutually-recursive or whatever, which is why we don't
3448 just test whether fn == current_function_decl. */
3449 && ! self_inlining_addr_expr (value, fn))
3450 {
3451 insert_decl_map (id, p, value);
3452 insert_debug_decl_map (id, p, var);
3453 return insert_init_debug_bind (id, bb, var, value, NULL);
3454 }
3455 }
3456
3457 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3458 that way, when the PARM_DECL is encountered, it will be
3459 automatically replaced by the VAR_DECL. */
3460 insert_decl_map (id, p, var);
3461
3462 /* Even if P was TREE_READONLY, the new VAR should not be.
3463 In the original code, we would have constructed a
3464 temporary, and then the function body would have never
3465 changed the value of P. However, now, we will be
3466 constructing VAR directly. The constructor body may
3467 change its value multiple times as it is being
3468 constructed. Therefore, it must not be TREE_READONLY;
3469 the back-end assumes that TREE_READONLY variable is
3470 assigned to only once. */
3471 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3472 TREE_READONLY (var) = 0;
3473
3474 /* If there is no setup required and we are in SSA, take the easy route
3475 replacing all SSA names representing the function parameter by the
3476 SSA name passed to the function.
3477
3478 We need to construct a map for the variable anyway, as it might be used
3479 in different SSA names when the parameter is set in the function.
3480
3481 Do the replacement at -O0 for const arguments replaced by a constant.
3482 This is important for builtin_constant_p and other constructs requiring
3483 a constant argument to be visible in the inlined function body. */
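/* An illustrative sketch: when inlining foo (x_7), the default
   definition p_1(D) of foo's parameter P is simply mapped to x_7, and
   no initialization statement needs to be emitted (the SSA names here
   are made up). */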
3484 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3485 && (optimize
3486 || (TREE_READONLY (p)
3487 && is_gimple_min_invariant (rhs)))
3488 && (TREE_CODE (rhs) == SSA_NAME
3489 || is_gimple_min_invariant (rhs))
3490 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3491 {
3492 insert_decl_map (id, def, rhs);
3493 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3494 }
3495
3496 /* If the value of the argument is never used, don't bother
3497 initializing it. */
3498 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3499 {
3500 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3501 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3502 }
3503
3504 /* Initialize this VAR_DECL from the equivalent argument. Convert
3505 the argument to the proper type in case it was promoted. */
3506 if (value)
3507 {
3508 if (rhs == error_mark_node)
3509 {
3510 insert_decl_map (id, p, var);
3511 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3512 }
3513
3514 STRIP_USELESS_TYPE_CONVERSION (rhs);
3515
3516 /* If we are in SSA form properly remap the default definition
3517 or assign to a dummy SSA name if the parameter is unused and
3518 we are not optimizing. */
3519 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3520 {
3521 if (def)
3522 {
3523 def = remap_ssa_name (def, id);
3524 init_stmt = gimple_build_assign (def, rhs);
3525 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3526 set_ssa_default_def (cfun, var, NULL);
3527 }
3528 else if (!optimize)
3529 {
3530 def = make_ssa_name (var);
3531 init_stmt = gimple_build_assign (def, rhs);
3532 }
3533 }
3534 else
3535 init_stmt = gimple_build_assign (var, rhs);
3536
3537 if (bb && init_stmt)
3538 insert_init_stmt (id, bb, init_stmt);
3539 }
3540 return init_stmt;
3541 }
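
/* For example, given a call "foo (5)" where FOO's parameter P is
   TREE_READONLY, not TREE_ADDRESSABLE and has no SSA default def,
   the fast path above simply maps P to the constant 5 and emits only
   a debug bind; otherwise an explicit initialization of the
   replacement VAR (or of the remapped default def in SSA form) is
   inserted into BB.  */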
3542
3543 /* Generate code to initialize the parameters of the function at the
3544 top of the stack in ID from the GIMPLE_CALL STMT. */
3545
3546 static void
3547 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3548 tree fn, basic_block bb)
3549 {
3550 tree parms;
3551 size_t i;
3552 tree p;
3553 tree vars = NULL_TREE;
3554 tree static_chain = gimple_call_chain (stmt);
3555
3556 /* Figure out what the parameters are. */
3557 parms = DECL_ARGUMENTS (fn);
3558
3559 /* Loop through the parameter declarations, replacing each with an
3560 equivalent VAR_DECL, appropriately initialized. */
3561 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3562 {
3563 tree val;
3564 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3565 setup_one_parameter (id, p, val, fn, bb, &vars);
3566 }
3567 /* After remapping the parameters, remap their types. This has to be
3568 done in a second loop over all parameters to appropriately remap
3569 variable-sized arrays whose size is specified in a
3570 parameter following the array. */
3571 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3572 {
3573 tree *varp = id->decl_map->get (p);
3574 if (varp && VAR_P (*varp))
3575 {
3576 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3577 ? ssa_default_def (id->src_cfun, p) : NULL);
3578 tree var = *varp;
3579 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3580 /* Also remap the type of the default definition if it was
3581 remapped to the default definition of the parameter replacement
3582 by the parameter setup above. */
3583 if (def)
3584 {
3585 tree *defp = id->decl_map->get (def);
3586 if (defp
3587 && TREE_CODE (*defp) == SSA_NAME
3588 && SSA_NAME_VAR (*defp) == var)
3589 TREE_TYPE (*defp) = TREE_TYPE (var);
3590 }
3591 }
3592 }
3593
3594 /* Initialize the static chain. */
3595 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3596 gcc_assert (fn != current_function_decl);
3597 if (p)
3598 {
3599 /* No static chain? Seems like a bug in tree-nested.c. */
3600 gcc_assert (static_chain);
3601
3602 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3603 }
3604
3605 declare_inline_vars (id->block, vars);
3606 }
3607
3608
3609 /* Declare a return variable to replace the RESULT_DECL for the
3610 function we are calling, and register it in ID->decl_map so that
3611 references to the RESULT_DECL in the inlined body are replaced
3612 by references to the new variable.
3613
3614 RETURN_SLOT, if non-null, is the place where to store the result. It
3615 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3616 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3617
3618 The return value is a (possibly null) value that holds the result
3619 as seen by the caller. */
3620
3621 static tree
3622 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3623 basic_block entry_bb)
3624 {
3625 tree callee = id->src_fn;
3626 tree result = DECL_RESULT (callee);
3627 tree callee_type = TREE_TYPE (result);
3628 tree caller_type;
3629 tree var, use;
3630
3631 /* Handle type-mismatches in the function declaration return type
3632 vs. the call expression. */
3633 if (modify_dest)
3634 caller_type = TREE_TYPE (modify_dest);
3635 else if (return_slot)
3636 caller_type = TREE_TYPE (return_slot);
3637 else /* No LHS on the call. */
3638 caller_type = TREE_TYPE (TREE_TYPE (callee));
3639
3640 /* We don't need to do anything for functions that don't return anything. */
3641 if (VOID_TYPE_P (callee_type))
3642 return NULL_TREE;
3643
3644 /* If there was a return slot, then the return value is the
3645 dereferenced address of that object. */
3646 if (return_slot)
3647 {
3648 /* The front end shouldn't have used both return_slot and
3649 a modify expression. */
3650 gcc_assert (!modify_dest);
3651 if (DECL_BY_REFERENCE (result))
3652 {
3653 tree return_slot_addr = build_fold_addr_expr (return_slot);
3654 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3655
3656 /* We are going to construct *&return_slot and we can't do that
3657 for variables believed to be not addressable.
3658
3659 FIXME: This check can possibly trigger, because values returned
3660 via the return slot optimization are not believed to have their
3661 address taken by alias analysis. */
3662 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3663 var = return_slot_addr;
3664 mark_addressable (return_slot);
3665 }
3666 else
3667 {
3668 var = return_slot;
3669 gcc_assert (TREE_CODE (var) != SSA_NAME);
3670 if (TREE_ADDRESSABLE (result))
3671 mark_addressable (var);
3672 }
3673 if (DECL_NOT_GIMPLE_REG_P (result)
3674 && DECL_P (var))
3675 DECL_NOT_GIMPLE_REG_P (var) = 1;
3676
3677 if (!useless_type_conversion_p (callee_type, caller_type))
3678 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3679
3680 use = NULL;
3681 goto done;
3682 }
3683
3684 /* All types requiring non-trivial constructors should have been handled. */
3685 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3686
3687 /* Attempt to avoid creating a new temporary variable. */
3688 if (modify_dest
3689 && TREE_CODE (modify_dest) != SSA_NAME)
3690 {
3691 bool use_it = false;
3692
3693 /* We can't use MODIFY_DEST if there's type promotion involved. */
3694 if (!useless_type_conversion_p (callee_type, caller_type))
3695 use_it = false;
3696
3697 /* ??? If we're assigning to a variable sized type, then we must
3698 reuse the destination variable, because we've no good way to
3699 create variable sized temporaries at this point. */
3700 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3701 use_it = true;
3702
3703 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3704 reuse it as the result of the call directly. Don't do this if
3705 it would promote MODIFY_DEST to addressable. */
3706 else if (TREE_ADDRESSABLE (result))
3707 use_it = false;
3708 else
3709 {
3710 tree base_m = get_base_address (modify_dest);
3711
3712 /* If the base isn't a decl, then it's a pointer, and we don't
3713 know where that's going to go. */
3714 if (!DECL_P (base_m))
3715 use_it = false;
3716 else if (is_global_var (base_m))
3717 use_it = false;
3718 else if (DECL_NOT_GIMPLE_REG_P (result)
3719 && !DECL_NOT_GIMPLE_REG_P (base_m))
3720 use_it = false;
3721 else if (!TREE_ADDRESSABLE (base_m))
3722 use_it = true;
3723 }
3724
3725 if (use_it)
3726 {
3727 var = modify_dest;
3728 use = NULL;
3729 goto done;
3730 }
3731 }
3732
3733 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3734
3735 var = copy_result_decl_to_var (result, id);
3736 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3737
3738 /* Do not have the rest of GCC warn about this variable as it should
3739 not be visible to the user. */
3740 TREE_NO_WARNING (var) = 1;
3741
3742 declare_inline_vars (id->block, var);
3743
3744 /* Build the use expr. If the return type of the function was
3745 promoted, convert it back to the expected type. */
3746 use = var;
3747 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3748 {
3749 /* If we can match up types by promotion/demotion, do so. */
3750 if (fold_convertible_p (caller_type, var))
3751 use = fold_convert (caller_type, var);
3752 else
3753 {
3754 /* ??? For valid programs we should not end up here.
3755 Still, if we end up with truly mismatched types here, fall back
3756 to using a MEM_REF to not leak invalid GIMPLE to the following
3757 passes. */
3758 /* Prevent var from being written into SSA form. */
3759 if (is_gimple_reg_type (TREE_TYPE (var)))
3760 DECL_NOT_GIMPLE_REG_P (var) = true;
3761 use = fold_build2 (MEM_REF, caller_type,
3762 build_fold_addr_expr (var),
3763 build_int_cst (ptr_type_node, 0));
3764 }
3765 }
3766
3767 STRIP_USELESS_TYPE_CONVERSION (use);
3768
3769 if (DECL_BY_REFERENCE (result))
3770 {
3771 TREE_ADDRESSABLE (var) = 1;
3772 var = build_fold_addr_expr (var);
3773 }
3774
3775 done:
3776 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3777 way, when the RESULT_DECL is encountered, it will be
3778 automatically replaced by the VAR_DECL.
3779
3780 When returning by reference, ensure that RESULT_DECL remaps to
3781 gimple_val. */
3782 if (DECL_BY_REFERENCE (result)
3783 && !is_gimple_val (var))
3784 {
3785 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3786 insert_decl_map (id, result, temp);
3787 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3788 its default_def SSA_NAME. */
3789 if (gimple_in_ssa_p (id->src_cfun)
3790 && is_gimple_reg (result))
3791 {
3792 temp = make_ssa_name (temp);
3793 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3794 }
3795 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3796 }
3797 else
3798 insert_decl_map (id, result, var);
3799
3800 /* Remember this so we can ignore it in remap_decls. */
3801 id->retvar = var;
3802 return use;
3803 }
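
/* For example, for "a = foo ()" where A is a local, non-addressable
   variable assigned from a function returning by value, the code above
   can reuse A itself as the return variable and returns a NULL use;
   when A cannot be reused, a fresh "retval" variable is created and
   the returned use refers to it (possibly through a conversion).  */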
3804
3805 /* Determine if the function can be copied. If so, return NULL. If
3806 not, return a string describing the reason for failure. */
3807
3808 const char *
3809 copy_forbidden (struct function *fun)
3810 {
3811 const char *reason = fun->cannot_be_copied_reason;
3812
3813 /* Only examine the function once. */
3814 if (fun->cannot_be_copied_set)
3815 return reason;
3816
3817 /* We cannot copy a function that receives a non-local goto
3818 because we cannot remap the destination label used in the
3819 function that is performing the non-local goto. */
3820 /* ??? Actually, this should be possible, if we work at it.
3821 No doubt there's just a handful of places that simply
3822 assume it doesn't happen and don't substitute properly. */
3823 if (fun->has_nonlocal_label)
3824 {
3825 reason = G_("function %q+F can never be copied "
3826 "because it receives a non-local goto");
3827 goto fail;
3828 }
3829
3830 if (fun->has_forced_label_in_static)
3831 {
3832 reason = G_("function %q+F can never be copied because it saves "
3833 "address of local label in a static variable");
3834 goto fail;
3835 }
3836
3837 fail:
3838 fun->cannot_be_copied_reason = reason;
3839 fun->cannot_be_copied_set = true;
3840 return reason;
3841 }
3842
3843
3844 static const char *inline_forbidden_reason;
3845
3846 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3847 iff a function cannot be inlined. Also sets the reason why. */
3848
3849 static tree
3850 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3851 struct walk_stmt_info *wip)
3852 {
3853 tree fn = (tree) wip->info;
3854 tree t;
3855 gimple *stmt = gsi_stmt (*gsi);
3856
3857 switch (gimple_code (stmt))
3858 {
3859 case GIMPLE_CALL:
3860 /* Refuse to inline an alloca call unless the user explicitly forced
3861 it, as this may change the program's memory overhead drastically
3862 when the function using alloca is called in a loop. In the GCC
3863 code present in SPEC2000, inlining into schedule_block caused it
3864 to require 2GB of RAM instead of 256MB. Don't do so for alloca
3865 calls emitted for VLA objects, as those can't cause unbounded
3866 growth (they're always wrapped inside stack_save/stack_restore regions). */
3867 if (gimple_maybe_alloca_call_p (stmt)
3868 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3869 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3870 {
3871 inline_forbidden_reason
3872 = G_("function %q+F can never be inlined because it uses "
3873 "alloca (override using the always_inline attribute)");
3874 *handled_ops_p = true;
3875 return fn;
3876 }
3877
3878 t = gimple_call_fndecl (stmt);
3879 if (t == NULL_TREE)
3880 break;
3881
3882 /* We cannot inline functions that call setjmp. */
3883 if (setjmp_call_p (t))
3884 {
3885 inline_forbidden_reason
3886 = G_("function %q+F can never be inlined because it uses setjmp");
3887 *handled_ops_p = true;
3888 return t;
3889 }
3890
3891 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3892 switch (DECL_FUNCTION_CODE (t))
3893 {
3894 /* We cannot inline functions that take a variable number of
3895 arguments. */
3896 case BUILT_IN_VA_START:
3897 case BUILT_IN_NEXT_ARG:
3898 case BUILT_IN_VA_END:
3899 inline_forbidden_reason
3900 = G_("function %q+F can never be inlined because it "
3901 "uses variable argument lists");
3902 *handled_ops_p = true;
3903 return t;
3904
3905 case BUILT_IN_LONGJMP:
3906 /* We can't inline functions that call __builtin_longjmp at
3907 all. The non-local goto machinery really requires the
3908 destination be in a different function. If we allow the
3909 function calling __builtin_longjmp to be inlined into the
3910 function calling __builtin_setjmp, Things will Go Awry. */
3911 inline_forbidden_reason
3912 = G_("function %q+F can never be inlined because "
3913 "it uses setjmp-longjmp exception handling");
3914 *handled_ops_p = true;
3915 return t;
3916
3917 case BUILT_IN_NONLOCAL_GOTO:
3918 /* Similarly. */
3919 inline_forbidden_reason
3920 = G_("function %q+F can never be inlined because "
3921 "it uses non-local goto");
3922 *handled_ops_p = true;
3923 return t;
3924
3925 case BUILT_IN_RETURN:
3926 case BUILT_IN_APPLY_ARGS:
3927 /* If a __builtin_apply_args caller would be inlined,
3928 it would be saving the arguments of the function it has
3929 been inlined into. Similarly, __builtin_return would
3930 return from the function the callee has been inlined into. */
3931 inline_forbidden_reason
3932 = G_("function %q+F can never be inlined because "
3933 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3934 *handled_ops_p = true;
3935 return t;
3936
3937 default:
3938 break;
3939 }
3940 break;
3941
3942 case GIMPLE_GOTO:
3943 t = gimple_goto_dest (stmt);
3944
3945 /* We will not inline a function which uses computed goto. The
3946 addresses of its local labels, which may be tucked into
3947 global storage, are of course not constant across
3948 instantiations, which causes unexpected behavior. */
3949 if (TREE_CODE (t) != LABEL_DECL)
3950 {
3951 inline_forbidden_reason
3952 = G_("function %q+F can never be inlined "
3953 "because it contains a computed goto");
3954 *handled_ops_p = true;
3955 return t;
3956 }
3957 break;
3958
3959 default:
3960 break;
3961 }
3962
3963 *handled_ops_p = false;
3964 return NULL_TREE;
3965 }
3966
3967 /* Return true if FNDECL is a function that cannot be inlined into
3968 another one. */
3969
3970 static bool
3971 inline_forbidden_p (tree fndecl)
3972 {
3973 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3974 struct walk_stmt_info wi;
3975 basic_block bb;
3976 bool forbidden_p = false;
3977
3978 /* First check for shared reasons not to copy the code. */
3979 inline_forbidden_reason = copy_forbidden (fun);
3980 if (inline_forbidden_reason != NULL)
3981 return true;
3982
3983 /* Next, walk the statements of the function looking for
3984 constructs we can't handle, or that are non-optimal for inlining. */
3985 hash_set<tree> visited_nodes;
3986 memset (&wi, 0, sizeof (wi));
3987 wi.info = (void *) fndecl;
3988 wi.pset = &visited_nodes;
3989
3990 FOR_EACH_BB_FN (bb, fun)
3991 {
3992 gimple *ret;
3993 gimple_seq seq = bb_seq (bb);
3994 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3995 forbidden_p = (ret != NULL);
3996 if (forbidden_p)
3997 break;
3998 }
3999
4000 return forbidden_p;
4001 }
4002 \f
4003 /* Return false if the function FNDECL cannot be inlined on account of its
4004 attributes, true otherwise. */
4005 static bool
4006 function_attribute_inlinable_p (const_tree fndecl)
4007 {
4008 if (targetm.attribute_table)
4009 {
4010 const_tree a;
4011
4012 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4013 {
4014 const_tree name = get_attribute_name (a);
4015 int i;
4016
4017 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4018 if (is_attribute_p (targetm.attribute_table[i].name, name))
4019 return targetm.function_attribute_inlinable_p (fndecl);
4020 }
4021 }
4022
4023 return true;
4024 }
4025
4026 /* Returns nonzero if FN is a function that does not have any
4027 fundamental inline-blocking properties. */
4028
4029 bool
4030 tree_inlinable_function_p (tree fn)
4031 {
4032 bool inlinable = true;
4033 bool do_warning;
4034 tree always_inline;
4035
4036 /* If we've already decided this function shouldn't be inlined,
4037 there's no need to check again. */
4038 if (DECL_UNINLINABLE (fn))
4039 return false;
4040
4041 /* We only warn for functions declared `inline' by the user. */
4042 do_warning = (opt_for_fn (fn, warn_inline)
4043 && DECL_DECLARED_INLINE_P (fn)
4044 && !DECL_NO_INLINE_WARNING_P (fn)
4045 && !DECL_IN_SYSTEM_HEADER (fn));
4046
4047 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4048
4049 if (flag_no_inline
4050 && always_inline == NULL)
4051 {
4052 if (do_warning)
4053 warning (OPT_Winline, "function %q+F can never be inlined because it "
4054 "is suppressed using %<-fno-inline%>", fn);
4055 inlinable = false;
4056 }
4057
4058 else if (!function_attribute_inlinable_p (fn))
4059 {
4060 if (do_warning)
4061 warning (OPT_Winline, "function %q+F can never be inlined because it "
4062 "uses attributes conflicting with inlining", fn);
4063 inlinable = false;
4064 }
4065
4066 else if (inline_forbidden_p (fn))
4067 {
4068 /* See if we should warn about uninlinable functions. Previously,
4069 some of these warnings would be issued while trying to expand
4070 the function inline, but that would cause multiple warnings
4071 about functions that would for example call alloca. But since
4072 this is a property of the function, just one warning is enough.
4073 As a bonus we can now give more details about the reason why a
4074 function is not inlinable. */
4075 if (always_inline)
4076 error (inline_forbidden_reason, fn);
4077 else if (do_warning)
4078 warning (OPT_Winline, inline_forbidden_reason, fn);
4079
4080 inlinable = false;
4081 }
4082
4083 /* Squirrel away the result so that we don't have to check again. */
4084 DECL_UNINLINABLE (fn) = !inlinable;
4085
4086 return inlinable;
4087 }
4088
4089 /* Estimate the cost of a memory move of type TYPE. Use the machine-
4090 dependent word size, take a possible memcpy call into account, and return
4091 the cost based on whether optimizing for size or speed according to SPEED_P. */
4092
4093 int
4094 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4095 {
4096 HOST_WIDE_INT size;
4097
4098 gcc_assert (!VOID_TYPE_P (type));
4099
4100 if (TREE_CODE (type) == VECTOR_TYPE)
4101 {
4102 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4103 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4104 int orig_mode_size
4105 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4106 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4107 return ((orig_mode_size + simd_mode_size - 1)
4108 / simd_mode_size);
4109 }
4110
4111 size = int_size_in_bytes (type);
4112
4113 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4114 /* Cost of a memcpy call, 3 arguments and the call. */
4115 return 4;
4116 else
4117 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4118 }
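
/* For instance, on a hypothetical target with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed_p) == 4, a 20-byte structure costs
   (20 + 8 - 1) / 8 == 3 piecewise moves, while a 64-byte structure
   exceeds 8 * 4 == 32 bytes and is costed as a memcpy call, i.e. 4.  */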
4119
4120 /* Returns the cost of operation CODE, according to WEIGHTS. */
4121
4122 static int
4123 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4124 tree op1 ATTRIBUTE_UNUSED, tree op2)
4125 {
4126 switch (code)
4127 {
4128 /* These are "free" conversions, or their presumed cost
4129 is folded into other operations. */
4130 case RANGE_EXPR:
4131 CASE_CONVERT:
4132 case COMPLEX_EXPR:
4133 case PAREN_EXPR:
4134 case VIEW_CONVERT_EXPR:
4135 return 0;
4136
4137 /* Assign a cost of 1 to the usual operations.
4138 ??? We may consider mapping RTL costs to this. */
4139 case COND_EXPR:
4140 case VEC_COND_EXPR:
4141 case VEC_PERM_EXPR:
4142
4143 case PLUS_EXPR:
4144 case POINTER_PLUS_EXPR:
4145 case POINTER_DIFF_EXPR:
4146 case MINUS_EXPR:
4147 case MULT_EXPR:
4148 case MULT_HIGHPART_EXPR:
4149
4150 case ADDR_SPACE_CONVERT_EXPR:
4151 case FIXED_CONVERT_EXPR:
4152 case FIX_TRUNC_EXPR:
4153
4154 case NEGATE_EXPR:
4155 case FLOAT_EXPR:
4156 case MIN_EXPR:
4157 case MAX_EXPR:
4158 case ABS_EXPR:
4159 case ABSU_EXPR:
4160
4161 case LSHIFT_EXPR:
4162 case RSHIFT_EXPR:
4163 case LROTATE_EXPR:
4164 case RROTATE_EXPR:
4165
4166 case BIT_IOR_EXPR:
4167 case BIT_XOR_EXPR:
4168 case BIT_AND_EXPR:
4169 case BIT_NOT_EXPR:
4170
4171 case TRUTH_ANDIF_EXPR:
4172 case TRUTH_ORIF_EXPR:
4173 case TRUTH_AND_EXPR:
4174 case TRUTH_OR_EXPR:
4175 case TRUTH_XOR_EXPR:
4176 case TRUTH_NOT_EXPR:
4177
4178 case LT_EXPR:
4179 case LE_EXPR:
4180 case GT_EXPR:
4181 case GE_EXPR:
4182 case EQ_EXPR:
4183 case NE_EXPR:
4184 case ORDERED_EXPR:
4185 case UNORDERED_EXPR:
4186
4187 case UNLT_EXPR:
4188 case UNLE_EXPR:
4189 case UNGT_EXPR:
4190 case UNGE_EXPR:
4191 case UNEQ_EXPR:
4192 case LTGT_EXPR:
4193
4194 case CONJ_EXPR:
4195
4196 case PREDECREMENT_EXPR:
4197 case PREINCREMENT_EXPR:
4198 case POSTDECREMENT_EXPR:
4199 case POSTINCREMENT_EXPR:
4200
4201 case REALIGN_LOAD_EXPR:
4202
4203 case WIDEN_SUM_EXPR:
4204 case WIDEN_MULT_EXPR:
4205 case DOT_PROD_EXPR:
4206 case SAD_EXPR:
4207 case WIDEN_MULT_PLUS_EXPR:
4208 case WIDEN_MULT_MINUS_EXPR:
4209 case WIDEN_LSHIFT_EXPR:
4210
4211 case VEC_WIDEN_MULT_HI_EXPR:
4212 case VEC_WIDEN_MULT_LO_EXPR:
4213 case VEC_WIDEN_MULT_EVEN_EXPR:
4214 case VEC_WIDEN_MULT_ODD_EXPR:
4215 case VEC_UNPACK_HI_EXPR:
4216 case VEC_UNPACK_LO_EXPR:
4217 case VEC_UNPACK_FLOAT_HI_EXPR:
4218 case VEC_UNPACK_FLOAT_LO_EXPR:
4219 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4220 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4221 case VEC_PACK_TRUNC_EXPR:
4222 case VEC_PACK_SAT_EXPR:
4223 case VEC_PACK_FIX_TRUNC_EXPR:
4224 case VEC_PACK_FLOAT_EXPR:
4225 case VEC_WIDEN_LSHIFT_HI_EXPR:
4226 case VEC_WIDEN_LSHIFT_LO_EXPR:
4227 case VEC_DUPLICATE_EXPR:
4228 case VEC_SERIES_EXPR:
4229
4230 return 1;
4231
4232 /* A few special cases of expensive operations. This is useful
4233 to avoid inlining functions that have too many of these. */
4234 case TRUNC_DIV_EXPR:
4235 case CEIL_DIV_EXPR:
4236 case FLOOR_DIV_EXPR:
4237 case ROUND_DIV_EXPR:
4238 case EXACT_DIV_EXPR:
4239 case TRUNC_MOD_EXPR:
4240 case CEIL_MOD_EXPR:
4241 case FLOOR_MOD_EXPR:
4242 case ROUND_MOD_EXPR:
4243 case RDIV_EXPR:
4244 if (TREE_CODE (op2) != INTEGER_CST)
4245 return weights->div_mod_cost;
4246 return 1;
4247
4248 /* Bit-field insertion needs several shift and mask operations. */
4249 case BIT_INSERT_EXPR:
4250 return 3;
4251
4252 default:
4253 /* We expect a copy assignment with no operator. */
4254 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4255 return 0;
4256 }
4257 }
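
/* For example, "x / y" with a non-constant Y is charged
   WEIGHTS->div_mod_cost (10 in eni_time_weights below), whereas
   "x / 4" divides by an INTEGER_CST and is charged 1 like any other
   simple arithmetic operation.  */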
4258
4259
4260 /* Estimate number of instructions that will be created by expanding
4261 the statements in the statement sequence STMTS.
4262 WEIGHTS contains weights attributed to various constructs. */
4263
4264 int
4265 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4266 {
4267 int cost;
4268 gimple_stmt_iterator gsi;
4269
4270 cost = 0;
4271 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4272 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4273
4274 return cost;
4275 }
4276
4277
4278 /* Estimate number of instructions that will be created by expanding STMT.
4279 WEIGHTS contains weights attributed to various constructs. */
4280
4281 int
4282 estimate_num_insns (gimple *stmt, eni_weights *weights)
4283 {
4284 unsigned cost, i;
4285 enum gimple_code code = gimple_code (stmt);
4286 tree lhs;
4287 tree rhs;
4288
4289 switch (code)
4290 {
4291 case GIMPLE_ASSIGN:
4292 /* Try to estimate the cost of assignments. We have two cases to
4293 deal with:
4294 1) Simple assignments to registers;
4295 2) Stores to things that must live in memory. This includes
4296 "normal" stores to scalars, but also assignments of large
4297 structures, or constructors of big arrays.
4298
4299 Let us look at both cases, assuming we have "a = b + C":
4300 <GIMPLE_ASSIGN <var_decl "a">
4301 <plus_expr <var_decl "b"> <constant C>>
4302 If "a" is a GIMPLE register, the assignment to it is free on almost
4303 any target, because "a" usually ends up in a real register. Hence
4304 the only cost of this expression comes from the PLUS_EXPR, and we
4305 can ignore the GIMPLE_ASSIGN.
4306 If "a" is not a GIMPLE register, the assignment to "a" will most
4307 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4308 of moving something into "a", which we compute using the function
4309 estimate_move_cost. */
4310 if (gimple_clobber_p (stmt))
4311 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4312
4313 lhs = gimple_assign_lhs (stmt);
4314 rhs = gimple_assign_rhs1 (stmt);
4315
4316 cost = 0;
4317
4318 /* Account for the cost of moving to / from memory. */
4319 if (gimple_store_p (stmt))
4320 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4321 if (gimple_assign_load_p (stmt))
4322 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4323
4324 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4325 gimple_assign_rhs1 (stmt),
4326 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4327 == GIMPLE_BINARY_RHS
4328 ? gimple_assign_rhs2 (stmt) : NULL);
4329 break;
4330
4331 case GIMPLE_COND:
4332 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4333 gimple_op (stmt, 0),
4334 gimple_op (stmt, 1));
4335 break;
4336
4337 case GIMPLE_SWITCH:
4338 {
4339 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4340 /* Take into account cost of the switch + guess 2 conditional jumps for
4341 each case label.
4342
4343 TODO: once the switch expansion logic is sufficiently separated, we can
4344 do a better job of estimating the cost of the switch. */
4345 if (weights->time_based)
4346 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4347 else
4348 cost = gimple_switch_num_labels (switch_stmt) * 2;
4349 }
4350 break;
4351
4352 case GIMPLE_CALL:
4353 {
4354 tree decl;
4355
4356 if (gimple_call_internal_p (stmt))
4357 return 0;
4358 else if ((decl = gimple_call_fndecl (stmt))
4359 && fndecl_built_in_p (decl))
4360 {
4361 /* Do not special case builtins where we see the body.
4362 This just confuses the inliner. */
4363 struct cgraph_node *node;
4364 if (!(node = cgraph_node::get (decl))
4365 || node->definition)
4366 ;
4367 /* For builtins that are likely expanded to nothing or
4368 inlined, do not account operand costs. */
4369 else if (is_simple_builtin (decl))
4370 return 0;
4371 else if (is_inexpensive_builtin (decl))
4372 return weights->target_builtin_call_cost;
4373 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4374 {
4375 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4376 specialize the cheap expansion we do here.
4377 ??? This asks for a more general solution. */
4378 switch (DECL_FUNCTION_CODE (decl))
4379 {
4380 case BUILT_IN_POW:
4381 case BUILT_IN_POWF:
4382 case BUILT_IN_POWL:
4383 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4384 && (real_equal
4385 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4386 &dconst2)))
4387 return estimate_operator_cost
4388 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4389 gimple_call_arg (stmt, 0));
4390 break;
4391
4392 default:
4393 break;
4394 }
4395 }
4396 }
4397
4398 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4399 if (gimple_call_lhs (stmt))
4400 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4401 weights->time_based);
4402 for (i = 0; i < gimple_call_num_args (stmt); i++)
4403 {
4404 tree arg = gimple_call_arg (stmt, i);
4405 cost += estimate_move_cost (TREE_TYPE (arg),
4406 weights->time_based);
4407 }
4408 break;
4409 }
4410
4411 case GIMPLE_RETURN:
4412 return weights->return_cost;
4413
4414 case GIMPLE_GOTO:
4415 case GIMPLE_LABEL:
4416 case GIMPLE_NOP:
4417 case GIMPLE_PHI:
4418 case GIMPLE_PREDICT:
4419 case GIMPLE_DEBUG:
4420 return 0;
4421
4422 case GIMPLE_ASM:
4423 {
4424 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4425 /* 1000 means infinity. This avoids overflows later
4426 with very long asm statements. */
4427 if (count > 1000)
4428 count = 1000;
4429 /* If this asm is asm inline, count anything as minimum size. */
4430 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4431 count = MIN (1, count);
4432 return MAX (1, count);
4433 }
4434
4435 case GIMPLE_RESX:
4436 /* This is either going to be an external function call with one
4437 argument, or two register copy statements plus a goto. */
4438 return 2;
4439
4440 case GIMPLE_EH_DISPATCH:
4441 /* ??? This is going to turn into a switch statement. Ideally
4442 we'd have a look at the eh region and estimate the number of
4443 edges involved. */
4444 return 10;
4445
4446 case GIMPLE_BIND:
4447 return estimate_num_insns_seq (
4448 gimple_bind_body (as_a <gbind *> (stmt)),
4449 weights);
4450
4451 case GIMPLE_EH_FILTER:
4452 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4453
4454 case GIMPLE_CATCH:
4455 return estimate_num_insns_seq (gimple_catch_handler (
4456 as_a <gcatch *> (stmt)),
4457 weights);
4458
4459 case GIMPLE_TRY:
4460 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4461 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4462
4463 /* OMP directives are generally very expensive. */
4464
4465 case GIMPLE_OMP_RETURN:
4466 case GIMPLE_OMP_SECTIONS_SWITCH:
4467 case GIMPLE_OMP_ATOMIC_STORE:
4468 case GIMPLE_OMP_CONTINUE:
4469 /* ...except these, which are cheap. */
4470 return 0;
4471
4472 case GIMPLE_OMP_ATOMIC_LOAD:
4473 return weights->omp_cost;
4474
4475 case GIMPLE_OMP_FOR:
4476 return (weights->omp_cost
4477 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4478 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4479
4480 case GIMPLE_OMP_PARALLEL:
4481 case GIMPLE_OMP_TASK:
4482 case GIMPLE_OMP_CRITICAL:
4483 case GIMPLE_OMP_MASTER:
4484 case GIMPLE_OMP_TASKGROUP:
4485 case GIMPLE_OMP_ORDERED:
4486 case GIMPLE_OMP_SCAN:
4487 case GIMPLE_OMP_SECTION:
4488 case GIMPLE_OMP_SECTIONS:
4489 case GIMPLE_OMP_SINGLE:
4490 case GIMPLE_OMP_TARGET:
4491 case GIMPLE_OMP_TEAMS:
4492 return (weights->omp_cost
4493 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4494
4495 case GIMPLE_TRANSACTION:
4496 return (weights->tm_cost
4497 + estimate_num_insns_seq (gimple_transaction_body (
4498 as_a <gtransaction *> (stmt)),
4499 weights));
4500
4501 default:
4502 gcc_unreachable ();
4503 }
4504
4505 return cost;
4506 }
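
/* To illustrate: "a = b + c" with A a GIMPLE register is estimated at
   just the PLUS_EXPR cost of 1; if A lives in memory, the store adds
   estimate_move_cost of A's type; and a direct call "a = foo (x)" is
   charged WEIGHTS->call_cost plus move costs for the LHS and each
   argument.  */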
4507
4508 /* Estimate number of instructions that will be created by expanding
4509 function FNDECL. WEIGHTS contains weights attributed to various
4510 constructs. */
4511
4512 int
4513 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4514 {
4515 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4516 gimple_stmt_iterator bsi;
4517 basic_block bb;
4518 int n = 0;
4519
4520 gcc_assert (my_function && my_function->cfg);
4521 FOR_EACH_BB_FN (bb, my_function)
4522 {
4523 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4524 n += estimate_num_insns (gsi_stmt (bsi), weights);
4525 }
4526
4527 return n;
4528 }
4529
4530
4531 /* Initializes weights used by estimate_num_insns. */
4532
4533 void
4534 init_inline_once (void)
4535 {
4536 eni_size_weights.call_cost = 1;
4537 eni_size_weights.indirect_call_cost = 3;
4538 eni_size_weights.target_builtin_call_cost = 1;
4539 eni_size_weights.div_mod_cost = 1;
4540 eni_size_weights.omp_cost = 40;
4541 eni_size_weights.tm_cost = 10;
4542 eni_size_weights.time_based = false;
4543 eni_size_weights.return_cost = 1;
4544
4545 /* Estimating the time for a call is difficult, since we have no idea what the
4546 called function does. In the current uses of eni_time_weights,
4547 underestimating the cost does less harm than overestimating it, so
4548 we choose a rather small value here. */
4549 eni_time_weights.call_cost = 10;
4550 eni_time_weights.indirect_call_cost = 15;
4551 eni_time_weights.target_builtin_call_cost = 1;
4552 eni_time_weights.div_mod_cost = 10;
4553 eni_time_weights.omp_cost = 40;
4554 eni_time_weights.tm_cost = 40;
4555 eni_time_weights.time_based = true;
4556 eni_time_weights.return_cost = 2;
4557 }
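
/* For example, with these weights a GIMPLE_RETURN is estimated at 1
   insn for size but 2 for time, and a division by a non-constant
   operand at 1 for size but 10 for time.  */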
4558
4559
4560 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4561
4562 static void
4563 prepend_lexical_block (tree current_block, tree new_block)
4564 {
4565 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4566 BLOCK_SUBBLOCKS (current_block) = new_block;
4567 BLOCK_SUPERCONTEXT (new_block) = current_block;
4568 }
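
/* E.g., if CURRENT_BLOCK already has subblocks B1 -> B2, prepending
   NEW_BLOCK yields the chain NEW_BLOCK -> B1 -> B2, with
   BLOCK_SUPERCONTEXT (NEW_BLOCK) == CURRENT_BLOCK.  */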
4569
4570 /* Add local variables from CALLEE to CALLER. */
4571
4572 static inline void
4573 add_local_variables (struct function *callee, struct function *caller,
4574 copy_body_data *id)
4575 {
4576 tree var;
4577 unsigned ix;
4578
4579 FOR_EACH_LOCAL_DECL (callee, ix, var)
4580 if (!can_be_nonlocal (var, id))
4581 {
4582 tree new_var = remap_decl (var, id);
4583
4584 /* Remap debug-expressions. */
4585 if (VAR_P (new_var)
4586 && DECL_HAS_DEBUG_EXPR_P (var)
4587 && new_var != var)
4588 {
4589 tree tem = DECL_DEBUG_EXPR (var);
4590 bool old_regimplify = id->regimplify;
4591 id->remapping_type_depth++;
4592 walk_tree (&tem, copy_tree_body_r, id, NULL);
4593 id->remapping_type_depth--;
4594 id->regimplify = old_regimplify;
4595 SET_DECL_DEBUG_EXPR (new_var, tem);
4596 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4597 }
4598 add_local_decl (caller, new_var);
4599 }
4600 }
4601
4602 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4603 have brought in or introduced any debug stmts for SRCVAR. */
4604
4605 static inline void
4606 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4607 {
4608 tree *remappedvarp = id->decl_map->get (srcvar);
4609
4610 if (!remappedvarp)
4611 return;
4612
4613 if (!VAR_P (*remappedvarp))
4614 return;
4615
4616 if (*remappedvarp == id->retvar)
4617 return;
4618
4619 tree tvar = target_for_debug_bind (*remappedvarp);
4620 if (!tvar)
4621 return;
4622
4623 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4624 id->call_stmt);
4625 gimple_seq_add_stmt (bindings, stmt);
4626 }
4627
4628 /* For each inlined variable for which we may have debug bind stmts,
4629 add before GSI a final debug stmt resetting it, marking the end of
4630 its life, so that var-tracking knows it doesn't have to compute
4631 further locations for it. */
4632
4633 static inline void
4634 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4635 {
4636 tree var;
4637 unsigned ix;
4638 gimple_seq bindings = NULL;
4639
4640 if (!gimple_in_ssa_p (id->src_cfun))
4641 return;
4642
4643 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4644 return;
4645
4646 for (var = DECL_ARGUMENTS (id->src_fn);
4647 var; var = DECL_CHAIN (var))
4648 reset_debug_binding (id, var, &bindings);
4649
4650 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4651 reset_debug_binding (id, var, &bindings);
4652
4653 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4654 }
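
/* Each such reset appears in dumps as "# DEBUG var => NULL", telling
   var-tracking that no further locations need to be computed for the
   variable past the inlined body.  */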
4655
4656 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4657
4658 static bool
4659 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4660 bitmap to_purge)
4661 {
4662 tree use_retvar;
4663 tree fn;
4664 hash_map<tree, tree> *dst;
4665 hash_map<tree, tree> *st = NULL;
4666 tree return_slot;
4667 tree modify_dest;
4668 struct cgraph_edge *cg_edge;
4669 cgraph_inline_failed_t reason;
4670 basic_block return_block;
4671 edge e;
4672 gimple_stmt_iterator gsi, stmt_gsi;
4673 bool successfully_inlined = false;
4674 bool purge_dead_abnormal_edges;
4675 gcall *call_stmt;
4676 unsigned int prop_mask, src_properties;
4677 struct function *dst_cfun;
4678 tree simduid;
4679 use_operand_p use;
4680 gimple *simtenter_stmt = NULL;
4681 vec<tree> *simtvars_save;
4682
4683 /* The gimplifier uses input_location in too many places, such as
4684 internal_get_tmp_var (). */
4685 location_t saved_location = input_location;
4686 input_location = gimple_location (stmt);
4687
4688 /* From here on, we're only interested in CALL_EXPRs. */
4689 call_stmt = dyn_cast <gcall *> (stmt);
4690 if (!call_stmt)
4691 goto egress;
4692
4693 cg_edge = id->dst_node->get_edge (stmt);
4694 gcc_checking_assert (cg_edge);
4695 /* First, see if we can figure out what function is being called.
4696 If we cannot, then there is no hope of inlining the function. */
4697 if (cg_edge->indirect_unknown_callee)
4698 goto egress;
4699 fn = cg_edge->callee->decl;
4700 gcc_checking_assert (fn);
4701
4702 /* If FN is a declaration of a function in a nested scope that was
4703 globally declared inline, we don't set its DECL_INITIAL.
4704 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4705 C++ front-end uses it for cdtors to refer to their internal
4706 declarations, that are not real functions. Fortunately those
4707 don't have trees to be saved, so we can tell by checking their
4708 gimple_body. */
4709 if (!DECL_INITIAL (fn)
4710 && DECL_ABSTRACT_ORIGIN (fn)
4711 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4712 fn = DECL_ABSTRACT_ORIGIN (fn);
4713
4714 /* Don't try to inline functions that are not well-suited to inlining. */
4715 if (cg_edge->inline_failed)
4716 {
4717 reason = cg_edge->inline_failed;
4718 /* If this call was originally indirect, we do not want to emit any
4719 inlining-related warnings or sorry messages because there are no
4720 guarantees regarding those. */
4721 if (cg_edge->indirect_inlining_edge)
4722 goto egress;
4723
4724 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4725 /* For extern inline functions that get redefined we have always
4726 silently ignored the always_inline flag. Better behavior would
4727 be to be able to keep both bodies and use the extern inline body
4728 for inlining, but we can't do that because frontends overwrite
4729 the body. */
4730 && !cg_edge->callee->redefined_extern_inline
4731 /* During early inline pass, report only when optimization is
4732 not turned on. */
4733 && (symtab->global_info_ready
4734 || !optimize
4735 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4736 /* PR 20090218-1_0.c. Body can be provided by another module. */
4737 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4738 {
4739 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4740 cgraph_inline_failed_string (reason));
4741 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4742 inform (gimple_location (stmt), "called from here");
4743 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4744 inform (DECL_SOURCE_LOCATION (cfun->decl),
4745 "called from this function");
4746 }
4747 else if (opt_for_fn (fn, warn_inline)
4748 && DECL_DECLARED_INLINE_P (fn)
4749 && !DECL_NO_INLINE_WARNING_P (fn)
4750 && !DECL_IN_SYSTEM_HEADER (fn)
4751 && reason != CIF_UNSPECIFIED
4752 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4753 /* Do not warn about not inlined recursive calls. */
4754 && !cg_edge->recursive_p ()
4755 /* Avoid warnings during early inline pass. */
4756 && symtab->global_info_ready)
4757 {
4758 auto_diagnostic_group d;
4759 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4760 fn, _(cgraph_inline_failed_string (reason))))
4761 {
4762 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4763 inform (gimple_location (stmt), "called from here");
4764 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4765 inform (DECL_SOURCE_LOCATION (cfun->decl),
4766 "called from this function");
4767 }
4768 }
4769 goto egress;
4770 }
4771 id->src_node = cg_edge->callee;
4772
4773 /* If the callee is a thunk, all we need to do is adjust the THIS
4774 pointer and redirect to the function being thunked. */
4775 if (id->src_node->thunk.thunk_p)
4776 {
4777 cgraph_edge *edge;
4778 tree virtual_offset = NULL;
4779 profile_count count = cg_edge->count;
4780 tree op;
4781 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4782
4783 cgraph_edge::remove (cg_edge);
4784 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4785 gimple_uid (stmt),
4786 profile_count::one (),
4787 profile_count::one (),
4788 true);
4789 edge->count = count;
4790 if (id->src_node->thunk.virtual_offset_p)
4791 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4792 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4793 NULL);
4794 gsi_insert_before (&iter, gimple_build_assign (op,
4795 gimple_call_arg (stmt, 0)),
4796 GSI_NEW_STMT);
4797 gcc_assert (id->src_node->thunk.this_adjusting);
4798 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4799 virtual_offset, id->src_node->thunk.indirect_offset);
4800
4801 gimple_call_set_arg (stmt, 0, op);
4802 gimple_call_set_fndecl (stmt, edge->callee->decl);
4803 update_stmt (stmt);
4804 id->src_node->remove ();
4805 expand_call_inline (bb, stmt, id, to_purge);
4806 maybe_remove_unused_call_args (cfun, stmt);
4807 return true;
4808 }
4809 fn = cg_edge->callee->decl;
4810 cg_edge->callee->get_untransformed_body ();
4811
4812 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4813 cg_edge->callee->verify ();
4814
4815 /* We will be inlining this callee. */
4816 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4817
4818 /* Update the callers EH personality. */
4819 if (DECL_FUNCTION_PERSONALITY (fn))
4820 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4821 = DECL_FUNCTION_PERSONALITY (fn);
4822
4823 /* Split the block before the GIMPLE_CALL. */
4824 stmt_gsi = gsi_for_stmt (stmt);
4825 gsi_prev (&stmt_gsi);
4826 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4827 bb = e->src;
4828 return_block = e->dest;
4829 remove_edge (e);
4830
4831 /* If the GIMPLE_CALL was the last statement of BB, it may have
4832 been the source of abnormal edges. In this case, schedule
4833 the removal of dead abnormal edges. */
4834 gsi = gsi_start_bb (return_block);
4835 gsi_next (&gsi);
4836 purge_dead_abnormal_edges = gsi_end_p (gsi);
4837
4838 stmt_gsi = gsi_start_bb (return_block);
4839
4840 /* Build a block containing code to initialize the arguments, the
4841 actual inline expansion of the body, and a label for the return
4842 statements within the function to jump to. The type of the
4843 statement expression is the return type of the function call.
4844 ??? If the call does not have an associated block then we will
4845 remap all callee blocks to NULL, effectively dropping most of
4846 its debug information. This should only happen for calls to
4847 artificial decls inserted by the compiler itself. We need to
4848 either link the inlined blocks into the caller block tree or
4849 not refer to them in any way to not break GC for locations. */
4850 if (tree block = gimple_block (stmt))
4851 {
4852 /* We do want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
4853 to make inlined_function_outer_scope_p return true on this BLOCK. */
4854 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4855 if (loc == UNKNOWN_LOCATION)
4856 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4857 if (loc == UNKNOWN_LOCATION)
4858 loc = BUILTINS_LOCATION;
4859 id->block = make_node (BLOCK);
4860 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4861 BLOCK_SOURCE_LOCATION (id->block) = loc;
4862 prepend_lexical_block (block, id->block);
4863 }
4864
4865 /* Local declarations will be replaced by their equivalents in this map. */
4866 st = id->decl_map;
4867 id->decl_map = new hash_map<tree, tree>;
4868 dst = id->debug_map;
4869 id->debug_map = NULL;
4870 if (flag_stack_reuse != SR_NONE)
4871 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4872
4873 /* Record the function we are about to inline. */
4874 id->src_fn = fn;
4875 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4876 id->reset_location = DECL_IGNORED_P (fn);
4877 id->call_stmt = call_stmt;
4878
4879 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4880 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4881 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4882 simtvars_save = id->dst_simt_vars;
4883 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4884 && (simduid = bb->loop_father->simduid) != NULL_TREE
4885 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4886 && single_imm_use (simduid, &use, &simtenter_stmt)
4887 && is_gimple_call (simtenter_stmt)
4888 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4889 vec_alloc (id->dst_simt_vars, 0);
4890 else
4891 id->dst_simt_vars = NULL;
4892
4893 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4894 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4895
4896 /* If the src function contains an IFN_VA_ARG, then so will the dst
4897 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4898 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4899 src_properties = id->src_cfun->curr_properties & prop_mask;
4900 if (src_properties != prop_mask)
4901 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4902 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4903 id->dst_node->calls_declare_variant_alt
4904 |= id->src_node->calls_declare_variant_alt;
4905
4906 gcc_assert (!id->src_cfun->after_inlining);
4907
4908 id->entry_bb = bb;
4909 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4910 {
4911 gimple_stmt_iterator si = gsi_last_bb (bb);
4912 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4913 NOT_TAKEN),
4914 GSI_NEW_STMT);
4915 }
4916 initialize_inlined_parameters (id, stmt, fn, bb);
4917 if (debug_nonbind_markers_p && debug_inline_points && id->block
4918 && inlined_function_outer_scope_p (id->block))
4919 {
4920 gimple_stmt_iterator si = gsi_last_bb (bb);
4921 gsi_insert_after (&si, gimple_build_debug_inline_entry
4922 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4923 GSI_NEW_STMT);
4924 }
4925
4926 if (DECL_INITIAL (fn))
4927 {
4928 if (gimple_block (stmt))
4929 {
4930 tree *var;
4931
4932 prepend_lexical_block (id->block,
4933 remap_blocks (DECL_INITIAL (fn), id));
4934 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4935 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4936 == NULL_TREE));
4937 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4938 otherwise for DWARF the DW_TAG_formal_parameter DIEs will not be
4939 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4940 under it. The parameters can then be evaluated in the debugger,
4941 but don't show up in backtraces. */
4942 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4943 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4944 {
4945 tree v = *var;
4946 *var = TREE_CHAIN (v);
4947 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4948 BLOCK_VARS (id->block) = v;
4949 }
4950 else
4951 var = &TREE_CHAIN (*var);
4952 }
4953 else
4954 remap_blocks_to_null (DECL_INITIAL (fn), id);
4955 }
4956
4957 /* Return statements in the function body will be replaced by jumps
4958 to the RET_LABEL. */
4959 gcc_assert (DECL_INITIAL (fn));
4960 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4961
4962 /* Find the LHS to which the result of this call is assigned. */
4963 return_slot = NULL;
4964 if (gimple_call_lhs (stmt))
4965 {
4966 modify_dest = gimple_call_lhs (stmt);
4967
4968 /* The function which we are inlining might not return a value,
4969 in which case we should issue a warning that the function
4970 does not return a value. In that case the optimizers will
4971 see that the variable to which the value is assigned was not
4972 initialized. We do not want to issue a warning about that
4973 uninitialized variable. */
4974 if (DECL_P (modify_dest))
4975 TREE_NO_WARNING (modify_dest) = 1;
4976
4977 if (gimple_call_return_slot_opt_p (call_stmt))
4978 {
4979 return_slot = modify_dest;
4980 modify_dest = NULL;
4981 }
4982 }
4983 else
4984 modify_dest = NULL;
4985
4986 /* If we are inlining a call to the C++ operator new, we don't want
4987 to use type based alias analysis on the return value. Otherwise
4988 we may get confused if the compiler sees that the inlined new
4989 function returns a pointer which was just deleted. See bug
4990 33407. */
4991 if (DECL_IS_OPERATOR_NEW_P (fn))
4992 {
4993 return_slot = NULL;
4994 modify_dest = NULL;
4995 }
4996
4997 /* Declare the return variable for the function. */
4998 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4999
5000 /* Add local vars in this inlined callee to caller. */
5001 add_local_variables (id->src_cfun, cfun, id);
5002
5003 if (id->src_node->clone.performed_splits)
5004 {
5005 /* Any calls from the inlined function will be turned into calls from the
5006 function we inline into. We must preserve the notes about how to split
5007 parameters, so such calls can be redirected/updated. */
5008 unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
5009 for (unsigned i = 0; i < len; i++)
5010 {
5011 ipa_param_performed_split ps
5012 = (*id->src_node->clone.performed_splits)[i];
5013 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5014 vec_safe_push (id->dst_node->clone.performed_splits, ps);
5015 }
5016
5017 if (flag_checking)
5018 {
5019 len = vec_safe_length (id->dst_node->clone.performed_splits);
5020 for (unsigned i = 0; i < len; i++)
5021 {
5022 ipa_param_performed_split *ps1
5023 = &(*id->dst_node->clone.performed_splits)[i];
5024 for (unsigned j = i + 1; j < len; j++)
5025 {
5026 ipa_param_performed_split *ps2
5027 = &(*id->dst_node->clone.performed_splits)[j];
5028 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5029 || ps1->unit_offset != ps2->unit_offset);
5030 }
5031 }
5032 }
5033 }
5034
5035 if (dump_enabled_p ())
5036 {
5037 char buf[128];
5038 snprintf (buf, sizeof(buf), "%4.2f",
5039 cg_edge->sreal_frequency ().to_double ());
5040 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5041 call_stmt,
5042 "Inlining %C to %C with frequency %s\n",
5043 id->src_node, id->dst_node, buf);
5044 if (dump_file && (dump_flags & TDF_DETAILS))
5045 {
5046 id->src_node->dump (dump_file);
5047 id->dst_node->dump (dump_file);
5048 }
5049 }
5050
5051 /* This is it. Duplicate the callee body. Assume callee is
5052 pre-gimplified. Note that we must not alter the caller
5053 function in any way before this point, as this CALL_EXPR may be
5054 a self-referential call; if we're calling ourselves, we need to
5055 duplicate our body before altering anything. */
5056 copy_body (id, bb, return_block, NULL);
5057
5058 reset_debug_bindings (id, stmt_gsi);
5059
5060 if (flag_stack_reuse != SR_NONE)
5061 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5062 if (!TREE_THIS_VOLATILE (p))
5063 {
5064 tree *varp = id->decl_map->get (p);
5065 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5066 {
5067 tree clobber = build_clobber (TREE_TYPE (*varp));
5068 gimple *clobber_stmt;
5069 clobber_stmt = gimple_build_assign (*varp, clobber);
5070 gimple_set_location (clobber_stmt, gimple_location (stmt));
5071 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5072 }
5073 }
5074
5075 /* Reset the escaped solution. */
5076 if (cfun->gimple_df)
5077 pt_solution_reset (&cfun->gimple_df->escaped);
5078
5079 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5080 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5081 {
5082 size_t nargs = gimple_call_num_args (simtenter_stmt);
5083 vec<tree> *vars = id->dst_simt_vars;
5084 auto_vec<tree> newargs (nargs + vars->length ());
5085 for (size_t i = 0; i < nargs; i++)
5086 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5087 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5088 {
5089 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5090 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5091 }
5092 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5093 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5094 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5095 gsi_replace (&gsi, g, false);
5096 }
5097 vec_free (id->dst_simt_vars);
5098 id->dst_simt_vars = simtvars_save;
5099
5100 /* Clean up. */
5101 if (id->debug_map)
5102 {
5103 delete id->debug_map;
5104 id->debug_map = dst;
5105 }
5106 delete id->decl_map;
5107 id->decl_map = st;
5108
5109 /* Unlink the call's virtual operands before replacing it. */
5110 unlink_stmt_vdef (stmt);
5111 if (gimple_vdef (stmt)
5112 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5113 release_ssa_name (gimple_vdef (stmt));
5114
5115 /* If the inlined function returns a result that we care about,
5116 substitute the GIMPLE_CALL with an assignment of the return
5117 variable to the LHS of the call. That is, if STMT was
5118 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5119 if (use_retvar && gimple_call_lhs (stmt))
5120 {
5121 gimple *old_stmt = stmt;
5122 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5123 gimple_set_location (stmt, gimple_location (old_stmt));
5124 gsi_replace (&stmt_gsi, stmt, false);
5125 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5126 /* Append a clobber for id->retvar if easily possible. */
5127 if (flag_stack_reuse != SR_NONE
5128 && id->retvar
5129 && VAR_P (id->retvar)
5130 && id->retvar != return_slot
5131 && id->retvar != modify_dest
5132 && !TREE_THIS_VOLATILE (id->retvar)
5133 && !is_gimple_reg (id->retvar)
5134 && !stmt_ends_bb_p (stmt))
5135 {
5136 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5137 gimple *clobber_stmt;
5138 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5139 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5140 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5141 }
5142 }
5143 else
5144 {
5145 /* Handle the case of inlining a function with no return
5146 statement, which causes the return value to become undefined. */
5147 if (gimple_call_lhs (stmt)
5148 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5149 {
5150 tree name = gimple_call_lhs (stmt);
5151 tree var = SSA_NAME_VAR (name);
5152 tree def = var ? ssa_default_def (cfun, var) : NULL;
5153
5154 if (def)
5155 {
5156 /* If the variable is used undefined, make this name
5157 undefined via a move. */
5158 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5159 gsi_replace (&stmt_gsi, stmt, true);
5160 }
5161 else
5162 {
5163 if (!var)
5164 {
5165 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5166 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5167 }
5168 /* Otherwise make this variable undefined. */
5169 gsi_remove (&stmt_gsi, true);
5170 set_ssa_default_def (cfun, var, name);
5171 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5172 }
5173 }
5174 /* Replace with a clobber for id->retvar. */
5175 else if (flag_stack_reuse != SR_NONE
5176 && id->retvar
5177 && VAR_P (id->retvar)
5178 && id->retvar != return_slot
5179 && id->retvar != modify_dest
5180 && !TREE_THIS_VOLATILE (id->retvar)
5181 && !is_gimple_reg (id->retvar))
5182 {
5183 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5184 gimple *clobber_stmt;
5185 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5186 gimple_set_location (clobber_stmt, gimple_location (stmt));
5187 gsi_replace (&stmt_gsi, clobber_stmt, false);
5188 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5189 }
5190 else
5191 gsi_remove (&stmt_gsi, true);
5192 }
5193
5194 if (purge_dead_abnormal_edges)
5195 bitmap_set_bit (to_purge, return_block->index);
5196
5197 /* If the value of the new expression is ignored, that's OK. We
5198 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5199 the equivalent inlined version either. */
5200 if (is_gimple_assign (stmt))
5201 {
5202 gcc_assert (gimple_assign_single_p (stmt)
5203 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5204 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5205 }
5206
5207 id->add_clobbers_to_eh_landing_pads = 0;
5208
5209 /* Output the inlining info for this abstract function, since it has been
5210 inlined. If we don't do this now, we can lose the information about the
5211 variables in the function when the blocks get blown away as soon as we
5212 remove the cgraph node. */
5213 if (gimple_block (stmt))
5214 (*debug_hooks->outlining_inline_function) (fn);
5215
5216 /* Update callgraph if needed. */
5217 cg_edge->callee->remove ();
5218
5219 id->block = NULL_TREE;
5220 id->retvar = NULL_TREE;
5221 successfully_inlined = true;
5222
5223 egress:
5224 input_location = saved_location;
5225 return successfully_inlined;
5226 }
5227
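/* For illustration only -- a sketch of the replacement performed above,
   with made-up names. Given a call statement

       a_1 = foo (x_2);

   whose inlined body computed its return value into USE_RETVAR, the
   call is substituted by

       a_1 = retval.5;

   and, when stack slot sharing is enabled (-fstack-reuse), followed by

       retval.5 = {CLOBBER};

   which marks the end of life of the return variable so later passes
   may reuse its stack slot. */
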
5228 /* Expand inlinable call statements in basic block BB. In GIMPLE,
5229 a call can only appear as a standalone GIMPLE_CALL statement,
5230 possibly assigning to an LHS; return true if any call was inlined. */
5231
5232 static bool
5233 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5234 bitmap to_purge)
5235 {
5236 gimple_stmt_iterator gsi;
5237 bool inlined = false;
5238
5239 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5240 {
5241 gimple *stmt = gsi_stmt (gsi);
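/* Advance the iterator before expanding the call: expand_call_inline
   splits BB at the call site and replaces or removes STMT, either of
   which would otherwise invalidate GSI (explanatory note). */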
5242 gsi_prev (&gsi);
5243
5244 if (is_gimple_call (stmt)
5245 && !gimple_call_internal_p (stmt))
5246 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5247 }
5248
5249 return inlined;
5250 }
5251
5252
5253 /* Walk all basic blocks created after FIRST and try to fold every statement
5254 in the STATEMENTS pointer set. */
5255
5256 static void
5257 fold_marked_statements (int first, hash_set<gimple *> *statements)
5258 {
5259 auto_bitmap to_purge;
5260
5261 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5262 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5263 bitmap_clear (visited);
5264
5265 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5266 while (!stack.is_empty ())
5267 {
5268 /* Look at the edge on the top of the stack. */
5269 edge e = stack.pop ();
5270 basic_block dest = e->dest;
5271
5272 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5273 || bitmap_bit_p (visited, dest->index))
5274 continue;
5275
5276 bitmap_set_bit (visited, dest->index);
5277
5278 if (dest->index >= first)
5279 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5280 !gsi_end_p (gsi); gsi_next (&gsi))
5281 {
5282 if (!statements->contains (gsi_stmt (gsi)))
5283 continue;
5284
5285 gimple *old_stmt = gsi_stmt (gsi);
5286 tree old_decl = (is_gimple_call (old_stmt)
5287 ? gimple_call_fndecl (old_stmt) : 0);
5288 if (old_decl && fndecl_built_in_p (old_decl))
5289 {
5290 /* Folding builtins can create multiple statements;
5291 we need to look at all of them. */
5292 gimple_stmt_iterator i2 = gsi;
5293 gsi_prev (&i2);
5294 if (fold_stmt (&gsi))
5295 {
5296 gimple *new_stmt;
5297 /* If a builtin at the end of a bb folded into nothing,
5298 the following loop won't work. */
5299 if (gsi_end_p (gsi))
5300 {
5301 cgraph_update_edges_for_call_stmt (old_stmt,
5302 old_decl, NULL);
5303 break;
5304 }
5305 if (gsi_end_p (i2))
5306 i2 = gsi_start_bb (dest);
5307 else
5308 gsi_next (&i2);
5309 while (1)
5310 {
5311 new_stmt = gsi_stmt (i2);
5312 update_stmt (new_stmt);
5313 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5314 new_stmt);
5315
5316 if (new_stmt == gsi_stmt (gsi))
5317 {
5318 /* It is okay to check only for the very last
5319 of these statements. If it is a throwing
5320 statement nothing will change. If it isn't,
5321 this can remove EH edges. The only case this
5322 would not be correct is when some intermediate
5323 statements throw but the last one doesn't;
5324 we would then have to split the block, which
5325 we can't do here and we'd lose anyway. And
5326 as builtins probably never throw, this is
5327 all moot anyway. */
5328 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5329 new_stmt))
5330 bitmap_set_bit (to_purge, dest->index);
5331 break;
5332 }
5333 gsi_next (&i2);
5334 }
5335 }
5336 }
5337 else if (fold_stmt (&gsi))
5338 {
5339 /* Re-read the statement from GSI as fold_stmt() may
5340 have changed it. */
5341 gimple *new_stmt = gsi_stmt (gsi);
5342 update_stmt (new_stmt);
5343
5344 if (is_gimple_call (old_stmt)
5345 || is_gimple_call (new_stmt))
5346 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5347 new_stmt);
5348
5349 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5350 bitmap_set_bit (to_purge, dest->index);
5351 }
5352 }
5353
5354 if (EDGE_COUNT (dest->succs) > 0)
5355 {
5356 /* Avoid warnings emitted from folding statements that
5357 became unreachable because of inlined function parameter
5358 propagation. */
5359 e = find_taken_edge (dest, NULL_TREE);
5360 if (e)
5361 stack.quick_push (e);
5362 else
5363 {
5364 edge_iterator ei;
5365 FOR_EACH_EDGE (e, ei, dest->succs)
5366 stack.safe_push (e);
5367 }
5368 }
5369 }
5370
5371 gimple_purge_all_dead_eh_edges (to_purge);
5372 }
5373
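/* For illustration only -- made-up GIMPLE showing why only the taken
   edge is followed above. After inlining foo (0), a guard in the
   inlined body may fold to a constant:

       if (0 != 0)
         goto <bb 4>;
       else
         goto <bb 5>;

   find_taken_edge then returns the edge to <bb 5>, so <bb 4> is never
   visited and folding its now-unreachable statements cannot emit
   bogus warnings. */
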
5374 /* Expand calls to inline functions in the body of FN. */
5375
5376 unsigned int
5377 optimize_inline_calls (tree fn)
5378 {
5379 copy_body_data id;
5380 basic_block bb;
5381 int last = n_basic_blocks_for_fn (cfun);
5382 bool inlined_p = false;
5383
5384 /* Clear out ID. */
5385 memset (&id, 0, sizeof (id));
5386
5387 id.src_node = id.dst_node = cgraph_node::get (fn);
5388 gcc_assert (id.dst_node->definition);
5389 id.dst_fn = fn;
5390 /* Or any functions that aren't finished yet. */
5391 if (current_function_decl)
5392 id.dst_fn = current_function_decl;
5393
5394 id.copy_decl = copy_decl_maybe_to_var;
5395 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5396 id.transform_new_cfg = false;
5397 id.transform_return_to_modify = true;
5398 id.transform_parameter = true;
5399 id.transform_lang_insert_block = NULL;
5400 id.statements_to_fold = new hash_set<gimple *>;
5401
5402 push_gimplify_context ();
5403
5404 /* We make no attempts to keep dominance info up-to-date. */
5405 free_dominance_info (CDI_DOMINATORS);
5406 free_dominance_info (CDI_POST_DOMINATORS);
5407
5408 /* Register specific gimple functions. */
5409 gimple_register_cfg_hooks ();
5410
5411 /* Reach the trees by walking over the CFG, and note the
5412 enclosing basic-blocks in the call edges. */
5413 /* We walk the blocks going forward, because inlined function bodies
5414 will split the block containing the call, and the new blocks will
5415 follow it; we'll trudge through them, processing their calls
5416 along the way. */
5417 auto_bitmap to_purge;
5418 FOR_EACH_BB_FN (bb, cfun)
5419 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5420
5421 pop_gimplify_context (NULL);
5422
5423 if (flag_checking)
5424 {
5425 struct cgraph_edge *e;
5426
5427 id.dst_node->verify ();
5428
5429 /* Double check that we inlined everything we are supposed to inline. */
5430 for (e = id.dst_node->callees; e; e = e->next_callee)
5431 gcc_assert (e->inline_failed);
5432 }
5433
5434 /* If we didn't inline into the function there is nothing to do. */
5435 if (!inlined_p)
5436 {
5437 delete id.statements_to_fold;
5438 return 0;
5439 }
5440
5441 /* Fold queued statements. */
5442 update_max_bb_count ();
5443 fold_marked_statements (last, id.statements_to_fold);
5444 delete id.statements_to_fold;
5445
5446 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5447 We need to do this after fold_marked_statements since that may walk
5448 the SSA use-def chain. */
5449 unsigned i;
5450 bitmap_iterator bi;
5451 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5452 {
5453 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5454 if (bb)
5455 {
5456 gimple_purge_dead_eh_edges (bb);
5457 gimple_purge_dead_abnormal_call_edges (bb);
5458 }
5459 }
5460
5461 gcc_assert (!id.debug_stmts.exists ());
5462
5463 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5464 number_blocks (fn);
5465
5466 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5467 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5468
5469 if (flag_checking)
5470 id.dst_node->verify ();
5471
5472 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5473 not possible yet - the IPA passes might make various functions non-throwing
5474 without proactively updating the local EH info. This is done later in
5475 the fixup_cfg pass, which also executes the verification. */
5476 return (TODO_update_ssa
5477 | TODO_cleanup_cfg
5478 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5479 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5480 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5481 ? TODO_rebuild_frequencies : 0));
5482 }
5483
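/* For illustration only -- a hypothetical pass body, not part of this
   file, showing how the returned TODO flags are meant to be consumed:

       unsigned int
       example_execute (function *fun)
       {
         // The flags must be propagated to the pass manager, which
         // performs the SSA update and CFG cleanup requested above.
         return optimize_inline_calls (fun->decl);
       }

   */
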
5484 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5485
5486 tree
5487 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5488 {
5489 enum tree_code code = TREE_CODE (*tp);
5490 enum tree_code_class cl = TREE_CODE_CLASS (code);
5491
5492 /* We make copies of most nodes. */
5493 if (IS_EXPR_CODE_CLASS (cl)
5494 || code == TREE_LIST
5495 || code == TREE_VEC
5496 || code == TYPE_DECL
5497 || code == OMP_CLAUSE)
5498 {
5499 /* Because the chain gets clobbered when we make a copy, we save it
5500 here. */
5501 tree chain = NULL_TREE, new_tree;
5502
5503 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5504 chain = TREE_CHAIN (*tp);
5505
5506 /* Copy the node. */
5507 new_tree = copy_node (*tp);
5508
5509 *tp = new_tree;
5510
5511 /* Now, restore the chain, if appropriate. That will cause
5512 walk_tree to walk into the chain as well. */
5513 if (code == PARM_DECL
5514 || code == TREE_LIST
5515 || code == OMP_CLAUSE)
5516 TREE_CHAIN (*tp) = chain;
5517
5518 /* For now, we don't update BLOCKs when we make copies. So, we
5519 have to nullify all BIND_EXPRs. */
5520 if (TREE_CODE (*tp) == BIND_EXPR)
5521 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5522 }
5523 else if (code == CONSTRUCTOR)
5524 {
5525 /* CONSTRUCTOR nodes need special handling because
5526 we need to duplicate the vector of elements. */
5527 tree new_tree;
5528
5529 new_tree = copy_node (*tp);
5530 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5531 *tp = new_tree;
5532 }
5533 else if (code == STATEMENT_LIST)
5534 /* We used to just abort on STATEMENT_LIST, but we can run into them
5535 with statement-expressions (c++/40975). */
5536 copy_statement_list (tp);
5537 else if (TREE_CODE_CLASS (code) == tcc_type)
5538 *walk_subtrees = 0;
5539 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5540 *walk_subtrees = 0;
5541 else if (TREE_CODE_CLASS (code) == tcc_constant)
5542 *walk_subtrees = 0;
5543 return NULL_TREE;
5544 }
5545
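/* For illustration only -- a hypothetical use of the callback above:

       tree expr = ...;
       walk_tree (&expr, copy_tree_r, NULL, NULL);

   afterwards EXPR points to a fresh copy of every expression node,
   while declarations, types and constants remain shared with the
   original (the *walk_subtrees = 0 cases above). */
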
5546 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5547 information indicating to what new SAVE_EXPR this one should be mapped,
5548 use that one. Otherwise, create a new node and enter it in ST. FN is
5549 the function into which the copy will be placed. */
5550
5551 static void
5552 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5553 {
5554 tree *n;
5555 tree t;
5556
5557 /* See if we already encountered this SAVE_EXPR. */
5558 n = st->get (*tp);
5559
5560 /* If we didn't already remap this SAVE_EXPR, do so now. */
5561 if (!n)
5562 {
5563 t = copy_node (*tp);
5564
5565 /* Remember this SAVE_EXPR. */
5566 st->put (*tp, t);
5567 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5568 st->put (t, t);
5569 }
5570 else
5571 {
5572 /* We've already walked into this SAVE_EXPR; don't do it again. */
5573 *walk_subtrees = 0;
5574 t = *n;
5575 }
5576
5577 /* Replace this SAVE_EXPR with the copy. */
5578 *tp = t;
5579 }
5580
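/* For illustration only -- the shape of the map after remapping one
   SAVE_EXPR S to its copy S' (hypothetical entries):

       st:  S  -> S'    later references to S are rewritten to S'
            S' -> S'    walking into S' again leaves it unchanged

   The self-mapping is what the "already-remapped" check in the
   function below relies on. */
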
5581 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5582 label, copies the declaration and enters it in the decl map in DATA (which
5583 is really a 'copy_body_data *'). */
5584
5585 static tree
5586 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5587 bool *handled_ops_p ATTRIBUTE_UNUSED,
5588 struct walk_stmt_info *wi)
5589 {
5590 copy_body_data *id = (copy_body_data *) wi->info;
5591 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5592
5593 if (stmt)
5594 {
5595 tree decl = gimple_label_label (stmt);
5596
5597 /* Copy the decl and remember the copy. */
5598 insert_decl_map (id, decl, id->copy_decl (decl, id));
5599 }
5600
5601 return NULL_TREE;
5602 }
5603
5604 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5605 struct walk_stmt_info *wi);
5606
5607 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5608 Using the decl map in the copy_body_data pointed to by WI->info,
5609 remaps all local declarations to appropriate replacements in gimple
5610 operands. */
5611
5612 static tree
5613 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5614 {
5615 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5616 copy_body_data *id = (copy_body_data *) wi->info;
5617 hash_map<tree, tree> *st = id->decl_map;
5618 tree *n;
5619 tree expr = *tp;
5620
5621 /* For recursive invocations this is no longer the LHS itself. */
5622 bool is_lhs = wi->is_lhs;
5623 wi->is_lhs = false;
5624
5625 if (TREE_CODE (expr) == SSA_NAME)
5626 {
5627 *tp = remap_ssa_name (*tp, id);
5628 *walk_subtrees = 0;
5629 if (is_lhs)
5630 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5631 }
5632 /* Only a local declaration (variable or label). */
5633 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5634 || TREE_CODE (expr) == LABEL_DECL)
5635 {
5636 /* Lookup the declaration. */
5637 n = st->get (expr);
5638
5639 /* If it's there, remap it. */
5640 if (n)
5641 *tp = *n;
5642 *walk_subtrees = 0;
5643 }
5644 else if (TREE_CODE (expr) == STATEMENT_LIST
5645 || TREE_CODE (expr) == BIND_EXPR
5646 || TREE_CODE (expr) == SAVE_EXPR)
5647 gcc_unreachable ();
5648 else if (TREE_CODE (expr) == TARGET_EXPR)
5649 {
5650 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5651 It's OK for this to happen if it was part of a subtree that
5652 isn't immediately expanded, such as operand 2 of another
5653 TARGET_EXPR. */
5654 if (!TREE_OPERAND (expr, 1))
5655 {
5656 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5657 TREE_OPERAND (expr, 3) = NULL_TREE;
5658 }
5659 }
5660 else if (TREE_CODE (expr) == OMP_CLAUSE)
5661 {
5662 /* Before the omplower pass completes, some OMP clauses can contain
5663 sequences that are neither copied by gimple_seq_copy nor walked by
5664 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5665 in those situations, we have to copy and process them explicitly. */
5666
5667 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5668 {
5669 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5670 seq = duplicate_remap_omp_clause_seq (seq, wi);
5671 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5672 }
5673 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5674 {
5675 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5676 seq = duplicate_remap_omp_clause_seq (seq, wi);
5677 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5678 }
5679 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5680 {
5681 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5682 seq = duplicate_remap_omp_clause_seq (seq, wi);
5683 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5684 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5685 seq = duplicate_remap_omp_clause_seq (seq, wi);
5686 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5687 }
5688 }
5689
5690 /* Keep iterating. */
5691 return NULL_TREE;
5692 }
5693
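/* Schematic illustration: between gimplification and omplower, a
   lastprivate, linear or reduction clause may carry helper statements
   (e.g. the copy-back sequence in OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ)
   that gimple_seq_copy does not reach through the clause; hence the
   explicit duplicate_remap_omp_clause_seq calls above. */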
5694
5695 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5696 Using the decl map in the copy_body_data pointed to by WI->info,
5697 remaps all local declarations to appropriate replacements in gimple
5698 statements. */
5699
5700 static tree
5701 replace_locals_stmt (gimple_stmt_iterator *gsip,
5702 bool *handled_ops_p ATTRIBUTE_UNUSED,
5703 struct walk_stmt_info *wi)
5704 {
5705 copy_body_data *id = (copy_body_data *) wi->info;
5706 gimple *gs = gsi_stmt (*gsip);
5707
5708 if (gbind *stmt = dyn_cast <gbind *> (gs))
5709 {
5710 tree block = gimple_bind_block (stmt);
5711
5712 if (block)
5713 {
5714 remap_block (&block, id);
5715 gimple_bind_set_block (stmt, block);
5716 }
5717
5718 /* This will remap a lot of the same decls again, but this should be
5719 harmless. */
5720 if (gimple_bind_vars (stmt))
5721 {
5722 tree old_var, decls = gimple_bind_vars (stmt);
5723
5724 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5725 if (!can_be_nonlocal (old_var, id)
5726 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5727 remap_decl (old_var, id);
5728
5729 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5730 id->prevent_decl_creation_for_types = true;
5731 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5732 id->prevent_decl_creation_for_types = false;
5733 }
5734 }
5735
5736 /* Keep iterating. */
5737 return NULL_TREE;
5738 }
5739
5740 /* Create a copy of SEQ and remap all decls in it. */
5741
5742 static gimple_seq
5743 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5744 {
5745 if (!seq)
5746 return NULL;
5747
5748 /* Any labels in OMP sequences can only be referred to from within the
5749 sequence itself, so we can do both the marking and the remapping here. */
5750 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5751 gimple_seq copy = gimple_seq_copy (seq);
5752 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5753 return copy;
5754 }
5755
5756 /* Copies everything in SEQ and replaces variables and labels local to
5757 current_function_decl. */
5758
5759 gimple_seq
5760 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5761 {
5762 copy_body_data id;
5763 struct walk_stmt_info wi;
5764 gimple_seq copy;
5765
5766 /* There's nothing to do for a NULL sequence. */
5767 if (seq == NULL)
5768 return seq;
5769
5770 /* Set up ID. */
5771 memset (&id, 0, sizeof (id));
5772 id.src_fn = current_function_decl;
5773 id.dst_fn = current_function_decl;
5774 id.src_cfun = cfun;
5775 id.decl_map = new hash_map<tree, tree>;
5776 id.debug_map = NULL;
5777
5778 id.copy_decl = copy_decl_no_change;
5779 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5780 id.transform_new_cfg = false;
5781 id.transform_return_to_modify = false;
5782 id.transform_parameter = false;
5783 id.transform_lang_insert_block = NULL;
5784
5785 /* Walk the tree once to find local labels. */
5786 memset (&wi, 0, sizeof (wi));
5787 hash_set<tree> visited;
5788 wi.info = &id;
5789 wi.pset = &visited;
5790 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5791
5792 copy = gimple_seq_copy (seq);
5793
5794 /* Walk the copy, remapping decls. */
5795 memset (&wi, 0, sizeof (wi));
5796 wi.info = &id;
5797 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5798
5799 /* Clean up. */
5800 delete id.decl_map;
5801 if (id.debug_map)
5802 delete id.debug_map;
5803 if (id.dependence_map)
5804 {
5805 delete id.dependence_map;
5806 id.dependence_map = NULL;
5807 }
5808
5809 return copy;
5810 }
5811
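/* For illustration only -- a hypothetical use of the function above:

       gimple_seq body = gimple_bind_body (bind_stmt);
       gimple_seq copy = copy_gimple_seq_and_replace_locals (body);

   COPY then consists of fresh statements in which every label and
   every non-static local variable of current_function_decl has been
   replaced by a fresh copy, leaving BODY intact. */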
5812
5813 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5814
5815 static tree
5816 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5817 {
5818 if (*tp == data)
5819 return (tree) data;
5820 else
5821 return NULL;
5822 }
5823
5824 DEBUG_FUNCTION bool
5825 debug_find_tree (tree top, tree search)
5826 {
5827 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5828 }
5829
5830
5831 /* Declare the variables created by the inliner. Add all the variables in
5832 VARS to BLOCK. */
5833
5834 static void
5835 declare_inline_vars (tree block, tree vars)
5836 {
5837 tree t;
5838 for (t = vars; t; t = DECL_CHAIN (t))
5839 {
5840 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5841 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5842 add_local_decl (cfun, t);
5843 }
5844
5845 if (block)
5846 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5847 }
5848
5849 /* Finish up the copy COPY of DECL. The DECL originally was in ID->src_fn,
5850 but now it will be in ID->dst_fn. Set up the debug info, abstract origin
5851 and context that all of the decl-copying routines share. */
5852
5853 tree
5854 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5855 {
5856 /* Don't generate debug information for the copy if we wouldn't have
5857 generated it for the original either. */
5858 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5859 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5860
5861 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5862 declaration inspired this copy. */
5863 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5864
5865 /* The new variable/label has no RTL yet. */
5866 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5867 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5868 SET_DECL_RTL (copy, 0);
5869 /* For vector typed decls make sure to update DECL_MODE according
5870 to the new function context. */
5871 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5872 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5873
5874 /* These args would always appear unused, if not for this. */
5875 TREE_USED (copy) = 1;
5876
5877 /* Set the context for the new declaration. */
5878 if (!DECL_CONTEXT (decl))
5879 /* Globals stay global. */
5880 ;
5881 else if (DECL_CONTEXT (decl) != id->src_fn)
5882 /* Things that weren't in the scope of the function we're inlining
5883 from aren't in the scope we're inlining to, either. */
5884 ;
5885 else if (TREE_STATIC (decl))
5886 /* Function-scoped static variables should stay in the original
5887 function. */
5888 ;
5889 else
5890 {
5891 /* Ordinary automatic local variables are now in the scope of the
5892 new function. */
5893 DECL_CONTEXT (copy) = id->dst_fn;
5894 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5895 {
5896 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5897 DECL_ATTRIBUTES (copy)
5898 = tree_cons (get_identifier ("omp simt private"), NULL,
5899 DECL_ATTRIBUTES (copy));
5900 id->dst_simt_vars->safe_push (copy);
5901 }
5902 }
5903
5904 return copy;
5905 }
5906
5907 /* Create a new VAR_DECL that is identical in all respects to DECL except
5908 that DECL must be either a PARM_DECL or a RESULT_DECL. The original
5909 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5910
5911 tree
5912 copy_decl_to_var (tree decl, copy_body_data *id)
5913 {
5914 tree copy, type;
5915
5916 gcc_assert (TREE_CODE (decl) == PARM_DECL
5917 || TREE_CODE (decl) == RESULT_DECL);
5918
5919 type = TREE_TYPE (decl);
5920
5921 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5922 VAR_DECL, DECL_NAME (decl), type);
5923 if (DECL_PT_UID_SET_P (decl))
5924 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5925 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5926 TREE_READONLY (copy) = TREE_READONLY (decl);
5927 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5928 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5929 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5930
5931 return copy_decl_for_dup_finish (id, decl, copy);
5932 }
5933
5934 /* Like copy_decl_to_var, but create a return slot object instead of a
5935 pointer variable for return by invisible reference. */
5936
5937 static tree
5938 copy_result_decl_to_var (tree decl, copy_body_data *id)
5939 {
5940 tree copy, type;
5941
5942 gcc_assert (TREE_CODE (decl) == PARM_DECL
5943 || TREE_CODE (decl) == RESULT_DECL);
5944
5945 type = TREE_TYPE (decl);
5946 if (DECL_BY_REFERENCE (decl))
5947 type = TREE_TYPE (type);
5948
5949 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5950 VAR_DECL, DECL_NAME (decl), type);
5951 if (DECL_PT_UID_SET_P (decl))
5952 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5953 TREE_READONLY (copy) = TREE_READONLY (decl);
5954 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5955 if (!DECL_BY_REFERENCE (decl))
5956 {
5957 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5958 DECL_NOT_GIMPLE_REG_P (copy)
5959 = (DECL_NOT_GIMPLE_REG_P (decl)
5960 /* RESULT_DECLs are treated specially by needs_to_live_in_memory;
5961 mirror that to the created VAR_DECL. */
5962 || (TREE_CODE (decl) == RESULT_DECL
5963 && aggregate_value_p (decl, id->src_fn)));
5964 }
5965
5966 return copy_decl_for_dup_finish (id, decl, copy);
5967 }
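
/* For illustration only (made-up types): for a function returning an
   aggregate by invisible reference,

       struct S f (void);   // DECL_RESULT has DECL_BY_REFERENCE set
                            // and type 'struct S *'

   copy_result_decl_to_var above strips the pointer type and creates a
   VAR_DECL of type 'struct S' -- an actual return slot -- whereas
   copy_decl_to_var would have created a 'struct S *' variable. */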
5968
5969 tree
5970 copy_decl_no_change (tree decl, copy_body_data *id)
5971 {
5972 tree copy;
5973
5974 copy = copy_node (decl);
5975
5976 /* The COPY is not abstract; it will be generated in DST_FN. */
5977 DECL_ABSTRACT_P (copy) = false;
5978 lang_hooks.dup_lang_specific_decl (copy);
5979
5980 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5981 been taken; it's for internal bookkeeping in expand_goto_internal. */
5982 if (TREE_CODE (copy) == LABEL_DECL)
5983 {
5984 TREE_ADDRESSABLE (copy) = 0;
5985 LABEL_DECL_UID (copy) = -1;
5986 }
5987
5988 return copy_decl_for_dup_finish (id, decl, copy);
5989 }
5990
5991 static tree
5992 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5993 {
5994 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5995 return copy_decl_to_var (decl, id);
5996 else
5997 return copy_decl_no_change (decl, id);
5998 }
5999
6000 /* Return a copy of the function's argument tree without any modifications. */
6001
6002 static tree
6003 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6004 {
6005 tree arg, *parg;
6006 tree new_parm = NULL;
6007
6008 parg = &new_parm;
6009 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6010 {
6011 tree new_tree = remap_decl (arg, id);
6012 if (TREE_CODE (new_tree) != PARM_DECL)
6013 new_tree = id->copy_decl (arg, id);
6014 lang_hooks.dup_lang_specific_decl (new_tree);
6015 *parg = new_tree;
6016 parg = &DECL_CHAIN (new_tree);
6017 }
6018 return new_parm;
6019 }
6020
6021 /* Return a copy of the function's static chain. */
6022 static tree
6023 copy_static_chain (tree static_chain, copy_body_data * id)
6024 {
6025 tree *chain_copy, *pvar;
6026
6027 chain_copy = &static_chain;
6028 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6029 {
6030 tree new_tree = remap_decl (*pvar, id);
6031 lang_hooks.dup_lang_specific_decl (new_tree);
6032 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6033 *pvar = new_tree;
6034 }
6035 return static_chain;
6036 }
6037
6038 /* Return true if the function is allowed to be versioned.
6039 This is a guard for the versioning functionality. */
6040
6041 bool
6042 tree_versionable_function_p (tree fndecl)
6043 {
6044 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6045 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6046 }
6047
6048 /* Update clone info after duplication. */
6049
6050 static void
6051 update_clone_info (copy_body_data * id)
6052 {
6053 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6054 = id->dst_node->clone.performed_splits;
6055 if (cur_performed_splits)
6056 {
6057 unsigned len = cur_performed_splits->length ();
6058 for (unsigned i = 0; i < len; i++)
6059 {
6060 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6061 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6062 }
6063 }
6064
6065 struct cgraph_node *node;
6066 if (!id->dst_node->clones)
6067 return;
6068 for (node = id->dst_node->clones; node != id->dst_node;)
6069 {
6070 /* First update replace maps to match the new body. */
6071 if (node->clone.tree_map)
6072 {
6073 unsigned int i;
6074 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
6075 {
6076 struct ipa_replace_map *replace_info;
6077 replace_info = (*node->clone.tree_map)[i];
6078 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6079 }
6080 }
6081 if (node->clone.performed_splits)
6082 {
6083 unsigned len = vec_safe_length (node->clone.performed_splits);
6084 for (unsigned i = 0; i < len; i++)
6085 {
6086 ipa_param_performed_split *ps
6087 = &(*node->clone.performed_splits)[i];
6088 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6089 }
6090 }
6091 if (unsigned len = vec_safe_length (cur_performed_splits))
6092 {
6093 /* We do not want to add the current performed splits when we are saving
6094 a copy of the function body for later inlining; that would just
6095 duplicate all entries. So let's have a look at whether anything
6096 referring to the first dummy_decl is already present. */
6097 unsigned dst_len = vec_safe_length (node->clone.performed_splits);
6098 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6099 for (unsigned i = 0; i < dst_len; i++)
6100 if ((*node->clone.performed_splits)[i].dummy_decl
6101 == first->dummy_decl)
6102 {
6103 len = 0;
6104 break;
6105 }
6106
6107 for (unsigned i = 0; i < len; i++)
6108 vec_safe_push (node->clone.performed_splits,
6109 (*cur_performed_splits)[i]);
6110 if (flag_checking)
6111 {
6112 for (unsigned i = 0; i < dst_len; i++)
6113 {
6114 ipa_param_performed_split *ps1
6115 = &(*node->clone.performed_splits)[i];
6116 for (unsigned j = i + 1; j < dst_len; j++)
6117 {
6118 ipa_param_performed_split *ps2
6119 = &(*node->clone.performed_splits)[j];
6120 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6121 || ps1->unit_offset != ps2->unit_offset);
6122 }
6123 }
6124 }
6125 }
6126
6127 if (node->clones)
6128 node = node->clones;
6129 else if (node->next_sibling_clone)
6130 node = node->next_sibling_clone;
6131 else
6132 {
6133 while (node != id->dst_node && !node->next_sibling_clone)
6134 node = node->clone_of;
6135 if (node != id->dst_node)
6136 node = node->next_sibling_clone;
6137 }
6138 }
6139 }
6140
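/* The loop above is a preorder walk over the clone tree, done without
   recursion. Illustrative shape (hypothetical nodes):

       dst_node
       +- clone A
       |  +- clone A1
       +- clone B

   visits A, A1, B, then climbs back up to dst_node and stops. */
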
6141 /* Create a copy of a function's tree.
6142 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6143 of the original function and the new copied function
6144 respectively. In case we want to replace a DECL
6145 tree with another tree while duplicating the function's
6146 body, TREE_MAP represents the mapping between these
6147 trees. If UPDATE_CLONES is set, the call_stmt fields
6148 of edges of clones of the function will be updated.
6149
6150 If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
6151 the function parameters and return value) should be modified.
6152 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
6153 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
6154 */
6155 void
6156 tree_function_versioning (tree old_decl, tree new_decl,
6157 vec<ipa_replace_map *, va_gc> *tree_map,
6158 ipa_param_adjustments *param_adjustments,
6159 bool update_clones, bitmap blocks_to_copy,
6160 basic_block new_entry)
6161 {
6162 struct cgraph_node *old_version_node;
6163 struct cgraph_node *new_version_node;
6164 copy_body_data id;
6165 tree p;
6166 unsigned i;
6167 struct ipa_replace_map *replace_info;
6168 basic_block old_entry_block, bb;
6169 auto_vec<gimple *, 10> init_stmts;
6170 tree vars = NULL_TREE;
6171
6172 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6173 && TREE_CODE (new_decl) == FUNCTION_DECL);
6174 DECL_POSSIBLY_INLINED (old_decl) = 1;
6175
6176 old_version_node = cgraph_node::get (old_decl);
6177 gcc_checking_assert (old_version_node);
6178 new_version_node = cgraph_node::get (new_decl);
6179 gcc_checking_assert (new_version_node);
6180
6181 /* Copy over debug args. */
6182 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6183 {
6184 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6185 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6186 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6187 old_debug_args = decl_debug_args_lookup (old_decl);
6188 if (old_debug_args)
6189 {
6190 new_debug_args = decl_debug_args_insert (new_decl);
6191 *new_debug_args = vec_safe_copy (*old_debug_args);
6192 }
6193 }
6194
6195 /* Output the inlining info for this abstract function, since it has been
6196 inlined. If we don't do this now, we can lose the information about the
6197 variables in the function when the blocks get blown away as soon as we
6198 remove the cgraph node. */
6199 (*debug_hooks->outlining_inline_function) (old_decl);
6200
6201 DECL_ARTIFICIAL (new_decl) = 1;
6202 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6203 if (DECL_ORIGIN (old_decl) == old_decl)
6204 old_version_node->used_as_abstract_origin = true;
6205 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6206
6207 /* Prepare the data structures for the tree copy. */
6208 memset (&id, 0, sizeof (id));
6209
6210 /* Collect the statements that will need folding after copying. */
6211 id.statements_to_fold = new hash_set<gimple *>;
6212
6213 id.decl_map = new hash_map<tree, tree>;
6214 id.debug_map = NULL;
6215 id.src_fn = old_decl;
6216 id.dst_fn = new_decl;
6217 id.src_node = old_version_node;
6218 id.dst_node = new_version_node;
6219 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6220 id.blocks_to_copy = blocks_to_copy;
6221
6222 id.copy_decl = copy_decl_no_change;
6223 id.transform_call_graph_edges
6224 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6225 id.transform_new_cfg = true;
6226 id.transform_return_to_modify = false;
6227 id.transform_parameter = false;
6228 id.transform_lang_insert_block = NULL;
6229
6230 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6231 (DECL_STRUCT_FUNCTION (old_decl));
6232 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6233 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6234 initialize_cfun (new_decl, old_decl,
6235 new_entry ? new_entry->count : old_entry_block->count);
6236 new_version_node->calls_declare_variant_alt
6237 = old_version_node->calls_declare_variant_alt;
6238 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6239 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6240 = id.src_cfun->gimple_df->ipa_pta;
6241
6242 /* Copy the function's static chain. */
6243 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6244 if (p)
6245 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6246 = copy_static_chain (p, &id);
6247
6248 auto_vec<int, 16> new_param_indices;
6249 ipa_param_adjustments *old_param_adjustments
6250 = old_version_node->clone.param_adjustments;
6251 if (old_param_adjustments)
6252 old_param_adjustments->get_updated_indices (&new_param_indices);
6253
6254 /* If there's a tree_map, prepare for substitution. */
6255 if (tree_map)
6256 for (i = 0; i < tree_map->length (); i++)
6257 {
6258 gimple *init;
6259 replace_info = (*tree_map)[i];
6260
6261 int p = replace_info->parm_num;
6262 if (old_param_adjustments)
6263 p = new_param_indices[p];
6264
6265 tree parm;
6266 for (parm = DECL_ARGUMENTS (old_decl); p;
6267 parm = DECL_CHAIN (parm))
6268 p--;
6269 gcc_assert (parm);
6270 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6271 id.src_fn, NULL, &vars);
6272 if (init)
6273 init_stmts.safe_push (init);
6274 }
6275
6276 ipa_param_body_adjustments *param_body_adjs = NULL;
6277 if (param_adjustments)
6278 {
6279 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6280 new_decl, old_decl,
6281 &id, &vars, tree_map);
6282 id.param_body_adjs = param_body_adjs;
6283 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6284 }
6285 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6286 DECL_ARGUMENTS (new_decl)
6287 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6288
6289 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6290 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6291
6292 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6293
6294 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6295 /* Add local vars. */
6296 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6297
6298 if (DECL_RESULT (old_decl) == NULL_TREE)
6299 ;
6300 else if (param_adjustments && param_adjustments->m_skip_return
6301 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6302 {
6303 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6304 &id);
6305 declare_inline_vars (NULL, resdecl_repl);
6306 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6307
6308 DECL_RESULT (new_decl)
6309 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6310 RESULT_DECL, NULL_TREE, void_type_node);
6311 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6312 DECL_IS_MALLOC (new_decl) = false;
6313 cfun->returns_struct = 0;
6314 cfun->returns_pcc_struct = 0;
6315 }
6316 else
6317 {
6318 tree old_name;
6319 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6320 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6321 if (gimple_in_ssa_p (id.src_cfun)
6322 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6323 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6324 {
6325 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6326 insert_decl_map (&id, old_name, new_name);
6327 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6328 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6329 }
6330 }
6331
6332 /* Set up the destination function's loop tree. */
6333 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6334 {
6335 cfun->curr_properties &= ~PROP_loops;
6336 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6337 cfun->curr_properties |= PROP_loops;
6338 }
6339
6340 /* Copy the Function's body. */
6341 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6342 new_entry);
6343
6344 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6345 number_blocks (new_decl);
6346
6347 /* We want to create the BB unconditionally, so that the addition of
6348 debug stmts doesn't affect BB count, which may in the end cause
6349 codegen differences. */
6350 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6351 while (init_stmts.length ())
6352 insert_init_stmt (&id, bb, init_stmts.pop ());
6353 update_clone_info (&id);
6354
6355 /* Remap the nonlocal_goto_save_area, if any. */
6356 if (cfun->nonlocal_goto_save_area)
6357 {
6358 struct walk_stmt_info wi;
6359
6360 memset (&wi, 0, sizeof (wi));
6361 wi.info = &id;
6362 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6363 }
6364
6365 /* Clean up. */
6366 delete id.decl_map;
6367 if (id.debug_map)
6368 delete id.debug_map;
6369 free_dominance_info (CDI_DOMINATORS);
6370 free_dominance_info (CDI_POST_DOMINATORS);
6371
6372 update_max_bb_count ();
6373 fold_marked_statements (0, id.statements_to_fold);
6374 delete id.statements_to_fold;
6375 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6376 if (id.dst_node->definition)
6377 cgraph_edge::rebuild_references ();
6378 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6379 {
6380 calculate_dominance_info (CDI_DOMINATORS);
6381 fix_loop_structure (NULL);
6382 }
6383 update_ssa (TODO_update_ssa);
6384
6385 /* After partial cloning we need to rescale frequencies, so they are
6386 within proper range in the cloned function. */
6387 if (new_entry)
6388 {
6389 struct cgraph_edge *e;
6390 rebuild_frequencies ();
6391
6392 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6393 for (e = new_version_node->callees; e; e = e->next_callee)
6394 {
6395 basic_block bb = gimple_bb (e->call_stmt);
6396 e->count = bb->count;
6397 }
6398 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6399 {
6400 basic_block bb = gimple_bb (e->call_stmt);
6401 e->count = bb->count;
6402 }
6403 }
6404
6405 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6406 {
6407 vec<tree, va_gc> **debug_args = NULL;
6408 unsigned int len = 0;
6409 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6410
6411 for (i = 0; i < reset_len; i++)
6412 {
6413 tree parm = param_body_adjs->m_reset_debug_decls[i];
6414 gcc_assert (is_gimple_reg (parm));
6415 tree ddecl;
6416
6417 if (debug_args == NULL)
6418 {
6419 debug_args = decl_debug_args_insert (new_decl);
6420 len = vec_safe_length (*debug_args);
6421 }
6422 ddecl = make_node (DEBUG_EXPR_DECL);
6423 DECL_ARTIFICIAL (ddecl) = 1;
6424 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6425 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6426 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6427 vec_safe_push (*debug_args, ddecl);
6428 }
6429 if (debug_args != NULL)
6430 {
6431 /* On the callee side, add
6432 DEBUG D#Y s=> parm
6433 DEBUG var => D#Y
6434 stmts to the first bb where var is a VAR_DECL created for the
6435 optimized-away parameter in the DECL_INITIAL block. This records
6436 in the debug info that var (whose DECL_ORIGIN is the parm
6437 PARM_DECL) is optimized away, but its value could be looked up at
6438 the call site as the value of D#X there. */
6439 tree vexpr;
6440 gimple_stmt_iterator cgsi
6441 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6442 gimple *def_temp;
6443 tree var = vars;
6444 i = vec_safe_length (*debug_args);
6445 do
6446 {
6447 i -= 2;
6448 while (var != NULL_TREE
6449 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6450 var = TREE_CHAIN (var);
6451 if (var == NULL_TREE)
6452 break;
6453 vexpr = make_node (DEBUG_EXPR_DECL);
6454 tree parm = (**debug_args)[i];
6455 DECL_ARTIFICIAL (vexpr) = 1;
6456 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6457 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6458 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6459 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6460 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6461 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6462 }
6463 while (i > len);
6464 }
6465 }
6466 delete param_body_adjs;
6467 free_dominance_info (CDI_DOMINATORS);
6468 free_dominance_info (CDI_POST_DOMINATORS);
6469
6470 gcc_assert (!id.debug_stmts.exists ());
6471 pop_cfun ();
6472 return;
6473 }
6474
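/* For illustration only -- a hypothetical caller (real ones live in the
   IPA cloning code) creating a version of F_DECL in which parameter 0
   is replaced by the constant 5:

       ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
       map->parm_num = 0;
       map->new_tree = build_int_cst (integer_type_node, 5);
       vec<ipa_replace_map *, va_gc> *tree_map = NULL;
       vec_safe_push (tree_map, map);
       tree_function_versioning (f_decl, clone_decl, tree_map,
                                 NULL, false, NULL, NULL);

   The replacement is materialized by setup_one_parameter, which emits
   an initialization statement into the new entry block split above. */
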
6475 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6476 the callee and return the inlined body on success. */
6477
6478 tree
6479 maybe_inline_call_in_expr (tree exp)
6480 {
6481 tree fn = get_callee_fndecl (exp);
6482
6483 /* We can only try to inline "const" functions. */
6484 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6485 {
6486 call_expr_arg_iterator iter;
6487 copy_body_data id;
6488 tree param, arg, t;
6489 hash_map<tree, tree> decl_map;
6490
6491 /* Remap the parameters. */
6492 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6493 param;
6494 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6495 decl_map.put (param, arg);
6496
6497 memset (&id, 0, sizeof (id));
6498 id.src_fn = fn;
6499 id.dst_fn = current_function_decl;
6500 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6501 id.decl_map = &decl_map;
6502
6503 id.copy_decl = copy_decl_no_change;
6504 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6505 id.transform_new_cfg = false;
6506 id.transform_return_to_modify = true;
6507 id.transform_parameter = true;
6508 id.transform_lang_insert_block = NULL;
6509
6510 /* Make sure not to unshare trees behind the front-end's back
6511 since front-end specific mechanisms may rely on sharing. */
6512 id.regimplify = false;
6513 id.do_not_unshare = true;
6514
6515 /* We're not inside any EH region. */
6516 id.eh_lp_nr = 0;
6517
6518 t = copy_tree_body (&id);
6519
6520 /* We can only return something suitable for use in a GENERIC
6521 expression tree. */
6522 if (TREE_CODE (t) == MODIFY_EXPR)
6523 return TREE_OPERAND (t, 1);
6524 }
6525
6526 return NULL_TREE;
6527 }
6528
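/* For illustration only (made-up example): for a function marked const
   whose GENERIC body has been saved, say

       int sq (int i) { return i * i; }

   a CALL_EXPR 'sq (3)' can be integrated: the copied body reduces to
   the MODIFY_EXPR '<retval> = 3 * 3' with 'i' remapped to 3, and the
   function above hands back its right-hand side. */
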
6529 /* Duplicate a type, fields and all. */
6530
6531 tree
6532 build_duplicate_type (tree type)
6533 {
6534 struct copy_body_data id;
6535
6536 memset (&id, 0, sizeof (id));
6537 id.src_fn = current_function_decl;
6538 id.dst_fn = current_function_decl;
6539 id.src_cfun = cfun;
6540 id.decl_map = new hash_map<tree, tree>;
6541 id.debug_map = NULL;
6542 id.copy_decl = copy_decl_no_change;
6543
6544 type = remap_type_1 (type, &id);
6545
6546 delete id.decl_map;
6547 if (id.debug_map)
6548 delete id.debug_map;
6549
6550 TYPE_CANONICAL (type) = type;
6551
6552 return type;
6553 }
6554
6555 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6556 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6557 evaluation. */
6558
6559 tree
6560 copy_fn (tree fn, tree& parms, tree& result)
6561 {
6562 copy_body_data id;
6563 tree param;
6564 hash_map<tree, tree> decl_map;
6565
6566 tree *p = &parms;
6567 *p = NULL_TREE;
6568
6569 memset (&id, 0, sizeof (id));
6570 id.src_fn = fn;
6571 id.dst_fn = current_function_decl;
6572 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6573 id.decl_map = &decl_map;
6574
6575 id.copy_decl = copy_decl_no_change;
6576 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6577 id.transform_new_cfg = false;
6578 id.transform_return_to_modify = false;
6579 id.transform_parameter = true;
6580 id.transform_lang_insert_block = NULL;
6581
6582 /* Make sure not to unshare trees behind the front-end's back
6583 since front-end specific mechanisms may rely on sharing. */
6584 id.regimplify = false;
6585 id.do_not_unshare = true;
6586 id.do_not_fold = true;
6587
6588 /* We're not inside any EH region. */
6589 id.eh_lp_nr = 0;
6590
6591 /* Remap the parameters and result and return them to the caller. */
6592 for (param = DECL_ARGUMENTS (fn);
6593 param;
6594 param = DECL_CHAIN (param))
6595 {
6596 *p = remap_decl (param, &id);
6597 p = &DECL_CHAIN (*p);
6598 }
6599
6600 if (DECL_RESULT (fn))
6601 result = remap_decl (DECL_RESULT (fn), &id);
6602 else
6603 result = NULL_TREE;
6604
6605 return copy_tree_body (&id);
6606 }
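
/* For illustration only -- a hypothetical caller, mirroring how the C++
   front end's constexpr evaluator uses the function above:

       tree parms, result;
       tree body = copy_fn (fndecl, parms, result);

   PARMS then chains fresh PARM_DECL copies, RESULT is the remapped
   RESULT_DECL (or NULL_TREE), and BODY is an unshared copy of
   DECL_SAVED_TREE (fndecl) that can be evaluated without clobbering
   the original. */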