gcc/tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
65
66 /* I'm not real happy about this, but we need to handle gimple and
67 non-gimple trees. */
68
69 /* Inlining, Cloning, Versioning, Parallelization
70
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements are adjusted accordingly.
77
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
82
83 Versioning: a function body is duplicated, and the result is a new
84 function rather than being copied into blocks of an existing function
85 as with inlining. Some parameters will become constants.
86
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
90
91 All of these will simultaneously look up any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
98
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
100
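/* [Editorial illustration -- not part of tree-inline.c.]  A minimal,
   self-contained sketch of what the "Inlining" transformation described
   above amounts to at the source level.  All names below (callee,
   caller_before, caller_after, p_copy, retval_copy) are invented for
   the example.  */

static int callee (int p) { return p + 1; }

int
caller_before (void)
{
  return callee (7);           /* the call that gets inlined */
}

int
caller_after (void)
{
  int p_copy = 7;              /* the PARM_DECL remapped to a VAR_DECL */
  int retval_copy;             /* the dedicated returned-value variable */
  retval_copy = p_copy + 1;    /* the RETURN_EXPR became a MODIFY_EXPR */
  return retval_copy;
}
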
101 /* To Do:
102
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
109
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
112
113
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
116
117 eni_weights eni_size_weights;
118
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
121
122 eni_weights eni_time_weights;
123
124 /* Prototypes. */
125
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_decl_to_var (tree, copy_body_data *);
134 static tree copy_result_decl_to_var (tree, copy_body_data *);
135 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
136 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138
139 /* Insert a tree->tree mapping for ID.  Although the name suggests
140 that the trees should be variables, this is used for more than that. */
141
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145 id->decl_map->put (key, value);
146
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 id->decl_map->put (value, value);
151 }
152
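/* [Editorial illustration -- not part of tree-inline.c.]  The identity
   entry added by insert_decl_map above, shown with a plain std::map so
   the effect is easy to see in isolation: mapping VALUE to itself means
   that if the copied node is encountered again later, the lookup succeeds
   and the node is not duplicated a second time.  Hypothetical stand-alone
   code, not GCC API.  */
#include <map>

template <typename T>
static void
insert_mapping_sketch (std::map<T, T> &decl_map, T key, T value)
{
  decl_map[key] = value;
  if (key != value)
    decl_map[value] = value;   /* identity map: don't re-copy VALUE later */
}
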
153 /* Insert a tree->tree mapping for ID. This is only used for
154 variables. */
155
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159 if (!gimple_in_ssa_p (id->src_cfun))
160 return;
161
162 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163 return;
164
165 if (!target_for_debug_bind (key))
166 return;
167
168 gcc_assert (TREE_CODE (key) == PARM_DECL);
169 gcc_assert (VAR_P (value));
170
171 if (!id->debug_map)
172 id->debug_map = new hash_map<tree, tree>;
173
174 id->debug_map->put (key, value);
175 }
176
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
180 context. */
181 static int processing_debug_stmt = 0;
182
183 /* Construct new SSA name for old NAME. ID is the inline context. */
184
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188 tree new_tree, var;
189 tree *n;
190
191 gcc_assert (TREE_CODE (name) == SSA_NAME);
192
193 n = id->decl_map->get (name);
194 if (n)
195 return unshare_expr (*n);
196
197 if (processing_debug_stmt)
198 {
199 if (SSA_NAME_IS_DEFAULT_DEF (name)
200 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 && id->entry_bb == NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 {
204 tree vexpr = make_node (DEBUG_EXPR_DECL);
205 gimple *def_temp;
206 gimple_stmt_iterator gsi;
207 tree val = SSA_NAME_VAR (name);
208
209 n = id->decl_map->get (val);
210 if (n != NULL)
211 val = *n;
212 if (TREE_CODE (val) != PARM_DECL
213 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
214 {
215 processing_debug_stmt = -1;
216 return name;
217 }
218 n = id->decl_map->get (val);
219 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
220 return *n;
221 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
222 DECL_ARTIFICIAL (vexpr) = 1;
223 TREE_TYPE (vexpr) = TREE_TYPE (name);
224 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
225 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
226 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
227 insert_decl_map (id, val, vexpr);
228 return vexpr;
229 }
230
231 processing_debug_stmt = -1;
232 return name;
233 }
234
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var = SSA_NAME_VAR (name);
237 if (!var
238 || (!SSA_NAME_IS_DEFAULT_DEF (name)
239 && VAR_P (var)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
241 && DECL_ARTIFICIAL (var)
242 && DECL_IGNORED_P (var)
243 && !DECL_NAME (var)))
244 {
245 struct ptr_info_def *pi;
246 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
247 if (!var && SSA_NAME_IDENTIFIER (name))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
249 insert_decl_map (id, name, new_tree);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id->src_cfun->gimple_df
254 && id->src_cfun->gimple_df->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name))
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
258 {
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
261 }
262 /* So can range-info. */
263 if (!POINTER_TYPE_P (TREE_TYPE (name))
264 && SSA_NAME_RANGE_INFO (name))
265 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
266 SSA_NAME_RANGE_INFO (name));
267 return new_tree;
268 }
269
270 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
271 in copy_bb. */
272 new_tree = remap_decl (var, id);
273
274 /* We might've substituted a constant or another SSA_NAME for
275 the variable.
276
277 Replace the SSA name representing the RESULT_DECL by the variable during
278 inlining: this saves us from the need to introduce a PHI node in case
279 the return value is only partly initialized. */
280 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
281 && (!SSA_NAME_VAR (name)
282 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
283 || !id->transform_return_to_modify))
284 {
285 struct ptr_info_def *pi;
286 new_tree = make_ssa_name (new_tree);
287 insert_decl_map (id, name, new_tree);
288 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
289 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
290 /* At least IPA points-to info can be directly transferred. */
291 if (id->src_cfun->gimple_df
292 && id->src_cfun->gimple_df->ipa_pta
293 && POINTER_TYPE_P (TREE_TYPE (name))
294 && (pi = SSA_NAME_PTR_INFO (name))
295 && !pi->pt.anything)
296 {
297 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
298 new_pi->pt = pi->pt;
299 }
300 /* So can range-info. */
301 if (!POINTER_TYPE_P (TREE_TYPE (name))
302 && SSA_NAME_RANGE_INFO (name))
303 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
304 SSA_NAME_RANGE_INFO (name));
305 if (SSA_NAME_IS_DEFAULT_DEF (name))
306 {
307 /* By inlining a function that has an uninitialized variable, we might
308 extend its lifetime (the variable might get reused). This causes
309 an ICE if we end up extending the lifetime of an SSA name across an
310 abnormal edge, and it also increases register pressure.
311
312 We simply initialize all uninitialized vars to 0, except
313 for the case where we are inlining into the very first BB. We could avoid
314 this for all BBs that are not inside strongly connected
315 regions of the CFG, but that is expensive to test. */
316 if (id->entry_bb
317 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
318 && (!SSA_NAME_VAR (name)
319 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
320 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
321 0)->dest
322 || EDGE_COUNT (id->entry_bb->preds) != 1))
323 {
324 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
325 gimple *init_stmt;
326 tree zero = build_zero_cst (TREE_TYPE (new_tree));
327
328 init_stmt = gimple_build_assign (new_tree, zero);
329 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
330 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
331 }
332 else
333 {
334 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
335 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
336 }
337 }
338 }
339 else
340 insert_decl_map (id, name, new_tree);
341 return new_tree;
342 }
343
344 /* Remap DECL during the copying of the BLOCK tree for the function. */
345
346 tree
347 remap_decl (tree decl, copy_body_data *id)
348 {
349 tree *n;
350
351 /* We only remap local variables in the current function. */
352
353 /* See if we have remapped this declaration. */
354
355 n = id->decl_map->get (decl);
356
357 if (!n && processing_debug_stmt)
358 {
359 processing_debug_stmt = -1;
360 return decl;
361 }
362
363 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
364 necessary DECLs have already been remapped and we do not want to duplicate
365 a decl coming from outside of the sequence we are copying. */
366 if (!n
367 && id->prevent_decl_creation_for_types
368 && id->remapping_type_depth > 0
369 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
370 return decl;
371
372 /* If we didn't already have an equivalent for this declaration, create one
373 now. */
374 if (!n)
375 {
376 /* Make a copy of the variable or label. */
377 tree t = id->copy_decl (decl, id);
378
379 /* Remember it, so that if we encounter this local entity again
380 we can reuse this copy. Do this early because remap_type may
381 need this decl for TYPE_STUB_DECL. */
382 insert_decl_map (id, decl, t);
383
384 if (!DECL_P (t))
385 return t;
386
387 /* Remap types, if necessary. */
388 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
389 if (TREE_CODE (t) == TYPE_DECL)
390 {
391 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
392
393 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
394 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
395 is not set on the TYPE_DECL, for example in LTO mode. */
396 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
397 {
398 tree x = build_variant_type_copy (TREE_TYPE (t));
399 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
400 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
401 DECL_ORIGINAL_TYPE (t) = x;
402 }
403 }
404
405 /* Remap sizes as necessary. */
406 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
407 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
408
409 /* If fields, do likewise for offset and qualifier. */
410 if (TREE_CODE (t) == FIELD_DECL)
411 {
412 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
413 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
414 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
415 }
416
417 return t;
418 }
419
420 if (id->do_not_unshare)
421 return *n;
422 else
423 return unshare_expr (*n);
424 }
425
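/* [Editorial illustration -- not part of tree-inline.c.]  remap_decl
   above is a copy-on-first-lookup memoization: consult the map, and only
   create (and record) a copy the first time a decl is seen.  A minimal
   stand-alone analogue, with a made-up node_sketch type standing in for
   'tree':  */
#include <unordered_map>

struct node_sketch { int payload; };

static node_sketch *
remap_node_sketch (std::unordered_map<node_sketch *, node_sketch *> &map,
                   node_sketch *decl)
{
  auto it = map.find (decl);
  if (it != map.end ())
    return it->second;                          /* already remapped: reuse it */
  node_sketch *copy = new node_sketch (*decl);  /* first encounter: copy... */
  map[decl] = copy;                             /* ...and remember it */
  return copy;
}
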
426 static tree
427 remap_type_1 (tree type, copy_body_data *id)
428 {
429 tree new_tree, t;
430
431 /* We do need a copy. Build and register it now. If this is a pointer or
432 reference type, remap the designated type and make a new pointer or
433 reference type. */
434 if (TREE_CODE (type) == POINTER_TYPE)
435 {
436 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
437 TYPE_MODE (type),
438 TYPE_REF_CAN_ALIAS_ALL (type));
439 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
440 new_tree = build_type_attribute_qual_variant (new_tree,
441 TYPE_ATTRIBUTES (type),
442 TYPE_QUALS (type));
443 insert_decl_map (id, type, new_tree);
444 return new_tree;
445 }
446 else if (TREE_CODE (type) == REFERENCE_TYPE)
447 {
448 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
449 TYPE_MODE (type),
450 TYPE_REF_CAN_ALIAS_ALL (type));
451 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
452 new_tree = build_type_attribute_qual_variant (new_tree,
453 TYPE_ATTRIBUTES (type),
454 TYPE_QUALS (type));
455 insert_decl_map (id, type, new_tree);
456 return new_tree;
457 }
458 else
459 new_tree = copy_node (type);
460
461 insert_decl_map (id, type, new_tree);
462
463 /* This is a new type, not a copy of an old type. Need to reassociate
464 variants. We can handle everything except the main variant lazily. */
465 t = TYPE_MAIN_VARIANT (type);
466 if (type != t)
467 {
468 t = remap_type (t, id);
469 TYPE_MAIN_VARIANT (new_tree) = t;
470 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
471 TYPE_NEXT_VARIANT (t) = new_tree;
472 }
473 else
474 {
475 TYPE_MAIN_VARIANT (new_tree) = new_tree;
476 TYPE_NEXT_VARIANT (new_tree) = NULL;
477 }
478
479 if (TYPE_STUB_DECL (type))
480 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
481
482 /* Lazily create pointer and reference types. */
483 TYPE_POINTER_TO (new_tree) = NULL;
484 TYPE_REFERENCE_TO (new_tree) = NULL;
485
486 /* Copy all types that may contain references to local variables; be sure to
487 preserve sharing in between type and its main variant when possible. */
488 switch (TREE_CODE (new_tree))
489 {
490 case INTEGER_TYPE:
491 case REAL_TYPE:
492 case FIXED_POINT_TYPE:
493 case ENUMERAL_TYPE:
494 case BOOLEAN_TYPE:
495 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
496 {
497 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
498 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
499
500 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
501 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
502 }
503 else
504 {
505 t = TYPE_MIN_VALUE (new_tree);
506 if (t && TREE_CODE (t) != INTEGER_CST)
507 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
508
509 t = TYPE_MAX_VALUE (new_tree);
510 if (t && TREE_CODE (t) != INTEGER_CST)
511 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
512 }
513 return new_tree;
514
515 case FUNCTION_TYPE:
516 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
517 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
518 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
519 else
520 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
521 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
522 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
523 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
524 else
525 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
526 return new_tree;
527
528 case ARRAY_TYPE:
529 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
530 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
531 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
532 else
533 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
534
535 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
536 {
537 gcc_checking_assert (TYPE_DOMAIN (type)
538 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
539 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
540 }
541 else
542 {
543 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
544 /* For array bounds where we have decided not to copy over the bounds
545 variable which isn't used in OpenMP/OpenACC region, change them to
546 an uninitialized VAR_DECL temporary. */
547 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
548 && id->adjust_array_error_bounds
549 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
550 {
551 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
552 DECL_ATTRIBUTES (v)
553 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
554 DECL_ATTRIBUTES (v));
555 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
556 }
557 }
558 break;
559
560 case RECORD_TYPE:
561 case UNION_TYPE:
562 case QUAL_UNION_TYPE:
563 if (TYPE_MAIN_VARIANT (type) != type
564 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
565 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
566 else
567 {
568 tree f, nf = NULL;
569
570 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
571 {
572 t = remap_decl (f, id);
573 DECL_CONTEXT (t) = new_tree;
574 DECL_CHAIN (t) = nf;
575 nf = t;
576 }
577 TYPE_FIELDS (new_tree) = nreverse (nf);
578 }
579 break;
580
581 case OFFSET_TYPE:
582 default:
583 /* Shouldn't have been thought variable sized. */
584 gcc_unreachable ();
585 }
586
587 /* All variants of the type share the same size, so use the already remapped data. */
588 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
589 {
590 tree s = TYPE_SIZE (type);
591 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
592 tree su = TYPE_SIZE_UNIT (type);
593 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
594 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
595 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
596 || s == mvs);
597 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
598 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
599 || su == mvsu);
600 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
601 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
602 }
603 else
604 {
605 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
606 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
607 }
608
609 return new_tree;
610 }
611
612 /* Helper function for remap_type_2, called through walk_tree. */
613
614 static tree
615 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
616 {
617 copy_body_data *id = (copy_body_data *) data;
618
619 if (TYPE_P (*tp))
620 *walk_subtrees = 0;
621
622 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
623 return *tp;
624
625 return NULL_TREE;
626 }
627
628 /* Return true if TYPE needs to be remapped because remap_decl on any
629 needed embedded decl returns something other than that decl. */
630
631 static bool
632 remap_type_2 (tree type, copy_body_data *id)
633 {
634 tree t;
635
636 #define RETURN_TRUE_IF_VAR(T) \
637 do \
638 { \
639 tree _t = (T); \
640 if (_t) \
641 { \
642 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
643 return true; \
644 if (!TYPE_SIZES_GIMPLIFIED (type) \
645 && walk_tree (&_t, remap_type_3, id, NULL)) \
646 return true; \
647 } \
648 } \
649 while (0)
650
651 switch (TREE_CODE (type))
652 {
653 case POINTER_TYPE:
654 case REFERENCE_TYPE:
655 case FUNCTION_TYPE:
656 case METHOD_TYPE:
657 return remap_type_2 (TREE_TYPE (type), id);
658
659 case INTEGER_TYPE:
660 case REAL_TYPE:
661 case FIXED_POINT_TYPE:
662 case ENUMERAL_TYPE:
663 case BOOLEAN_TYPE:
664 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
665 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
666 return false;
667
668 case ARRAY_TYPE:
669 if (remap_type_2 (TREE_TYPE (type), id)
670 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
671 return true;
672 break;
673
674 case RECORD_TYPE:
675 case UNION_TYPE:
676 case QUAL_UNION_TYPE:
677 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
678 if (TREE_CODE (t) == FIELD_DECL)
679 {
680 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
681 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
682 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
683 if (TREE_CODE (type) == QUAL_UNION_TYPE)
684 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
685 }
686 break;
687
688 default:
689 return false;
690 }
691
692 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
693 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
694 return false;
695 #undef RETURN_TRUE_IF_VAR
696 }
697
698 tree
699 remap_type (tree type, copy_body_data *id)
700 {
701 tree *node;
702 tree tmp;
703
704 if (type == NULL)
705 return type;
706
707 /* See if we have remapped this type. */
708 node = id->decl_map->get (type);
709 if (node)
710 return *node;
711
712 /* The type only needs remapping if it's variably modified. */
713 if (! variably_modified_type_p (type, id->src_fn)
714 /* Don't remap if copy_decl method doesn't always return a new
715 decl and for all embedded decls returns the passed in decl. */
716 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
717 {
718 insert_decl_map (id, type, type);
719 return type;
720 }
721
722 id->remapping_type_depth++;
723 tmp = remap_type_1 (type, id);
724 id->remapping_type_depth--;
725
726 return tmp;
727 }
728
729 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
730
731 static bool
732 can_be_nonlocal (tree decl, copy_body_data *id)
733 {
734 /* We cannot duplicate function decls. */
735 if (TREE_CODE (decl) == FUNCTION_DECL)
736 return true;
737
738 /* Local static vars must be non-local or we get multiple declaration
739 problems. */
740 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
741 return true;
742
743 return false;
744 }
745
746 static tree
747 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
748 copy_body_data *id)
749 {
750 tree old_var;
751 tree new_decls = NULL_TREE;
752
753 /* Remap its variables. */
754 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
755 {
756 tree new_var;
757
758 if (can_be_nonlocal (old_var, id))
759 {
760 /* We need to add this variable to the local decls as otherwise
761 nothing else will do so. */
762 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
763 add_local_decl (cfun, old_var);
764 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
765 && !DECL_IGNORED_P (old_var)
766 && nonlocalized_list)
767 vec_safe_push (*nonlocalized_list, old_var);
768 continue;
769 }
770
771 /* Remap the variable. */
772 new_var = remap_decl (old_var, id);
773
774 /* If we didn't remap this variable, we can't mess with its
775 TREE_CHAIN. If we remapped this variable to the return slot, it's
776 already declared somewhere else, so don't declare it here. */
777
778 if (new_var == id->retvar)
779 ;
780 else if (!new_var)
781 {
782 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
783 && !DECL_IGNORED_P (old_var)
784 && nonlocalized_list)
785 vec_safe_push (*nonlocalized_list, old_var);
786 }
787 else
788 {
789 gcc_assert (DECL_P (new_var));
790 DECL_CHAIN (new_var) = new_decls;
791 new_decls = new_var;
792
793 /* Also copy value-expressions. */
794 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
795 {
796 tree tem = DECL_VALUE_EXPR (new_var);
797 bool old_regimplify = id->regimplify;
798 id->remapping_type_depth++;
799 walk_tree (&tem, copy_tree_body_r, id, NULL);
800 id->remapping_type_depth--;
801 id->regimplify = old_regimplify;
802 SET_DECL_VALUE_EXPR (new_var, tem);
803 }
804 }
805 }
806
807 return nreverse (new_decls);
808 }
809
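/* [Editorial illustration -- not part of tree-inline.c.]  remap_decls
   above builds the new decl chain by prepending each copy and reversing
   once at the end (nreverse), which keeps the loop linear.  The same
   idea with std::forward_list, as hypothetical stand-alone code:  */
#include <forward_list>

static std::forward_list<int>
copy_preserving_order_sketch (const std::forward_list<int> &in)
{
  std::forward_list<int> out;
  for (int v : in)
    out.push_front (v);   /* O(1) prepend, but the order comes out reversed */
  out.reverse ();         /* a single reverse restores the original order */
  return out;
}
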
810 /* Copy the BLOCK to contain remapped versions of the variables
811 therein. And hook the new block into the block-tree. */
812
813 static void
814 remap_block (tree *block, copy_body_data *id)
815 {
816 tree old_block;
817 tree new_block;
818
819 /* Make the new block. */
820 old_block = *block;
821 new_block = make_node (BLOCK);
822 TREE_USED (new_block) = TREE_USED (old_block);
823 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
824 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
825 BLOCK_NONLOCALIZED_VARS (new_block)
826 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
827 *block = new_block;
828
829 /* Remap its variables. */
830 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
831 &BLOCK_NONLOCALIZED_VARS (new_block),
832 id);
833
834 if (id->transform_lang_insert_block)
835 id->transform_lang_insert_block (new_block);
836
837 /* Remember the remapped block. */
838 insert_decl_map (id, old_block, new_block);
839 }
840
841 /* Copy the whole block tree and root it in id->block. */
842
843 static tree
844 remap_blocks (tree block, copy_body_data *id)
845 {
846 tree t;
847 tree new_tree = block;
848
849 if (!block)
850 return NULL;
851
852 remap_block (&new_tree, id);
853 gcc_assert (new_tree != block);
854 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
855 prepend_lexical_block (new_tree, remap_blocks (t, id));
856 /* Blocks are in arbitrary order, but make things slightly prettier and do
857 not swap order when producing a copy. */
858 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
859 return new_tree;
860 }
861
862 /* Remap the block tree rooted at BLOCK to nothing. */
863
864 static void
865 remap_blocks_to_null (tree block, copy_body_data *id)
866 {
867 tree t;
868 insert_decl_map (id, block, NULL_TREE);
869 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
870 remap_blocks_to_null (t, id);
871 }
872
873 /* Remap the location info pointed to by LOCUS. */
874
875 static location_t
876 remap_location (location_t locus, copy_body_data *id)
877 {
878 if (LOCATION_BLOCK (locus))
879 {
880 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
881 gcc_assert (n);
882 if (*n)
883 return set_block (locus, *n);
884 }
885
886 locus = LOCATION_LOCUS (locus);
887
888 if (locus != UNKNOWN_LOCATION && id->block)
889 return set_block (locus, id->block);
890
891 return locus;
892 }
893
894 static void
895 copy_statement_list (tree *tp)
896 {
897 tree_stmt_iterator oi, ni;
898 tree new_tree;
899
900 new_tree = alloc_stmt_list ();
901 ni = tsi_start (new_tree);
902 oi = tsi_start (*tp);
903 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
904 *tp = new_tree;
905
906 for (; !tsi_end_p (oi); tsi_next (&oi))
907 {
908 tree stmt = tsi_stmt (oi);
909 if (TREE_CODE (stmt) == STATEMENT_LIST)
910 /* This copy is not redundant; tsi_link_after will smash this
911 STATEMENT_LIST into the end of the one we're building, and we
912 don't want to do that with the original. */
913 copy_statement_list (&stmt);
914 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
915 }
916 }
917
918 static void
919 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
920 {
921 tree block = BIND_EXPR_BLOCK (*tp);
922 /* Copy (and replace) the statement. */
923 copy_tree_r (tp, walk_subtrees, NULL);
924 if (block)
925 {
926 remap_block (&block, id);
927 BIND_EXPR_BLOCK (*tp) = block;
928 }
929
930 if (BIND_EXPR_VARS (*tp))
931 /* This will remap a lot of the same decls again, but this should be
932 harmless. */
933 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
934 }
935
936
937 /* Create a new gimple_seq by remapping all the statements in BODY
938 using the inlining information in ID. */
939
940 static gimple_seq
941 remap_gimple_seq (gimple_seq body, copy_body_data *id)
942 {
943 gimple_stmt_iterator si;
944 gimple_seq new_body = NULL;
945
946 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
947 {
948 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
949 gimple_seq_add_seq (&new_body, new_stmts);
950 }
951
952 return new_body;
953 }
954
955
956 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
957 block using the mapping information in ID. */
958
959 static gimple *
960 copy_gimple_bind (gbind *stmt, copy_body_data *id)
961 {
962 gimple *new_bind;
963 tree new_block, new_vars;
964 gimple_seq body, new_body;
965
966 /* Copy the statement. Note that we purposely don't use copy_stmt
967 here because we need to remap statements as we copy. */
968 body = gimple_bind_body (stmt);
969 new_body = remap_gimple_seq (body, id);
970
971 new_block = gimple_bind_block (stmt);
972 if (new_block)
973 remap_block (&new_block, id);
974
975 /* This will remap a lot of the same decls again, but this should be
976 harmless. */
977 new_vars = gimple_bind_vars (stmt);
978 if (new_vars)
979 new_vars = remap_decls (new_vars, NULL, id);
980
981 new_bind = gimple_build_bind (new_vars, new_body, new_block);
982
983 return new_bind;
984 }
985
986 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
987
988 static bool
989 is_parm (tree decl)
990 {
991 if (TREE_CODE (decl) == SSA_NAME)
992 {
993 decl = SSA_NAME_VAR (decl);
994 if (!decl)
995 return false;
996 }
997
998 return (TREE_CODE (decl) == PARM_DECL);
999 }
1000
1001 /* Remap the dependence CLIQUE from the source to the destination function
1002 as specified in ID. */
1003
1004 static unsigned short
1005 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1006 {
1007 if (clique == 0 || processing_debug_stmt)
1008 return 0;
1009 if (!id->dependence_map)
1010 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1011 bool existed;
1012 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1013 if (!existed)
1014 {
1015 /* Clique 1 is reserved for local ones set by PTA. */
1016 if (cfun->last_clique == 0)
1017 cfun->last_clique = 1;
1018 newc = ++cfun->last_clique;
1019 }
1020 return newc;
1021 }
1022
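/* [Editorial illustration -- not part of tree-inline.c.]  The clique
   remapping above is a memoized counter: the first time a source clique
   number is seen it is assigned the next free clique number in the
   destination function, and later occurrences reuse that assignment.
   A hypothetical stand-alone sketch:  */
#include <unordered_map>

static unsigned short
remap_clique_sketch (std::unordered_map<unsigned short, unsigned short> &map,
                     unsigned short &last_clique, unsigned short clique)
{
  if (clique == 0)
    return 0;                    /* 0 means "not in any clique" */
  auto it = map.find (clique);
  if (it != map.end ())
    return it->second;           /* seen before: reuse the assignment */
  if (last_clique == 0)
    last_clique = 1;             /* clique 1 is reserved */
  return map[clique] = ++last_clique;
}
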
1023 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1024 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1025 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1026 recursing into the children nodes of *TP. */
1027
1028 static tree
1029 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1030 {
1031 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1032 copy_body_data *id = (copy_body_data *) wi_p->info;
1033 tree fn = id->src_fn;
1034
1035 /* For recursive invocations this is no longer the LHS itself. */
1036 bool is_lhs = wi_p->is_lhs;
1037 wi_p->is_lhs = false;
1038
1039 if (TREE_CODE (*tp) == SSA_NAME)
1040 {
1041 *tp = remap_ssa_name (*tp, id);
1042 *walk_subtrees = 0;
1043 if (is_lhs)
1044 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1045 return NULL;
1046 }
1047 else if (auto_var_in_fn_p (*tp, fn))
1048 {
1049 /* Local variables and labels need to be replaced by equivalent
1050 variables. We don't want to copy static variables; there's
1051 only one of those, no matter how many times we inline the
1052 containing function. Similarly for globals from an outer
1053 function. */
1054 tree new_decl;
1055
1056 /* Remap the declaration. */
1057 new_decl = remap_decl (*tp, id);
1058 gcc_assert (new_decl);
1059 /* Replace this variable with the copy. */
1060 STRIP_TYPE_NOPS (new_decl);
1061 /* ??? The C++ frontend uses void * pointer zero to initialize
1062 any other type. This confuses the middle-end type verification.
1063 As cloned bodies do not go through gimplification again the fixup
1064 there doesn't trigger. */
1065 if (TREE_CODE (new_decl) == INTEGER_CST
1066 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1067 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1068 *tp = new_decl;
1069 *walk_subtrees = 0;
1070 }
1071 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1072 gcc_unreachable ();
1073 else if (TREE_CODE (*tp) == SAVE_EXPR)
1074 gcc_unreachable ();
1075 else if (TREE_CODE (*tp) == LABEL_DECL
1076 && (!DECL_CONTEXT (*tp)
1077 || decl_function_context (*tp) == id->src_fn))
1078 /* These may need to be remapped for EH handling. */
1079 *tp = remap_decl (*tp, id);
1080 else if (TREE_CODE (*tp) == FIELD_DECL)
1081 {
1082 /* If the enclosing record type is variably_modified_type_p, the field
1083 has already been remapped. Otherwise, it need not be. */
1084 tree *n = id->decl_map->get (*tp);
1085 if (n)
1086 *tp = *n;
1087 *walk_subtrees = 0;
1088 }
1089 else if (TYPE_P (*tp))
1090 /* Types may need remapping as well. */
1091 *tp = remap_type (*tp, id);
1092 else if (CONSTANT_CLASS_P (*tp))
1093 {
1094 /* If this is a constant, we have to copy the node iff the type
1095 will be remapped. copy_tree_r will not copy a constant. */
1096 tree new_type = remap_type (TREE_TYPE (*tp), id);
1097
1098 if (new_type == TREE_TYPE (*tp))
1099 *walk_subtrees = 0;
1100
1101 else if (TREE_CODE (*tp) == INTEGER_CST)
1102 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1103 else
1104 {
1105 *tp = copy_node (*tp);
1106 TREE_TYPE (*tp) = new_type;
1107 }
1108 }
1109 else
1110 {
1111 /* Otherwise, just copy the node. Note that copy_tree_r already
1112 knows not to copy VAR_DECLs, etc., so this is safe. */
1113
1114 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1115 {
1116 /* We need to re-canonicalize MEM_REFs from inline substitutions
1117 that can happen when a pointer argument is an ADDR_EXPR.
1118 Recurse here manually to allow that. */
1119 tree ptr = TREE_OPERAND (*tp, 0);
1120 tree type = remap_type (TREE_TYPE (*tp), id);
1121 tree old = *tp;
1122 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1123 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1124 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1125 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1126 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1127 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1128 {
1129 MR_DEPENDENCE_CLIQUE (*tp)
1130 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1131 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1132 }
1133 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1134 remapped a parameter as the property might be valid only
1135 for the parameter itself. */
1136 if (TREE_THIS_NOTRAP (old)
1137 && (!is_parm (TREE_OPERAND (old, 0))
1138 || (!id->transform_parameter && is_parm (ptr))))
1139 TREE_THIS_NOTRAP (*tp) = 1;
1140 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1141 *walk_subtrees = 0;
1142 return NULL;
1143 }
1144
1145 /* Here is the "usual case". Copy this tree node, and then
1146 tweak some special cases. */
1147 copy_tree_r (tp, walk_subtrees, NULL);
1148
1149 if (TREE_CODE (*tp) != OMP_CLAUSE)
1150 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1151
1152 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1153 {
1154 /* The copied TARGET_EXPR has never been expanded, even if the
1155 original node was expanded already. */
1156 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1157 TREE_OPERAND (*tp, 3) = NULL_TREE;
1158 }
1159 else if (TREE_CODE (*tp) == ADDR_EXPR)
1160 {
1161 /* Variable substitution need not be simple. In particular,
1162 the MEM_REF substitution above. Make sure that
1163 TREE_CONSTANT and friends are up-to-date. */
1164 int invariant = is_gimple_min_invariant (*tp);
1165 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1166 recompute_tree_invariant_for_addr_expr (*tp);
1167
1168 /* If this used to be invariant, but is not any longer,
1169 then regimplification is probably needed. */
1170 if (invariant && !is_gimple_min_invariant (*tp))
1171 id->regimplify = true;
1172
1173 *walk_subtrees = 0;
1174 }
1175 }
1176
1177 /* Update the TREE_BLOCK for the cloned expr. */
1178 if (EXPR_P (*tp))
1179 {
1180 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1181 tree old_block = TREE_BLOCK (*tp);
1182 if (old_block)
1183 {
1184 tree *n;
1185 n = id->decl_map->get (TREE_BLOCK (*tp));
1186 if (n)
1187 new_block = *n;
1188 }
1189 TREE_SET_BLOCK (*tp, new_block);
1190 }
1191
1192 /* Keep iterating. */
1193 return NULL_TREE;
1194 }
1195
1196
1197 /* Called from copy_body_id via walk_tree. DATA is really a
1198 `copy_body_data *'. */
1199
1200 tree
1201 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1202 {
1203 copy_body_data *id = (copy_body_data *) data;
1204 tree fn = id->src_fn;
1205 tree new_block;
1206
1207 /* Begin by recognizing trees that we'll completely rewrite for the
1208 inlining context. Our output for these trees is completely
1209 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1210 into an edge). Further down, we'll handle trees that get
1211 duplicated and/or tweaked. */
1212
1213 /* When requested, RETURN_EXPRs should be transformed to just the
1214 contained MODIFY_EXPR. The branch semantics of the return will
1215 be handled elsewhere by manipulating the CFG rather than a statement. */
1216 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1217 {
1218 tree assignment = TREE_OPERAND (*tp, 0);
1219
1220 /* If we're returning something, just turn that into an
1221 assignment into the equivalent of the original RESULT_DECL.
1222 If the "assignment" is just the result decl, the result
1223 decl has already been set (e.g. a recent "foo (&result_decl,
1224 ...)"); just toss the entire RETURN_EXPR. */
1225 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1226 {
1227 /* Replace the RETURN_EXPR with (a copy of) the
1228 MODIFY_EXPR hanging underneath. */
1229 *tp = copy_node (assignment);
1230 }
1231 else /* Else the RETURN_EXPR returns no value. */
1232 {
1233 *tp = NULL;
1234 return (tree) (void *)1;
1235 }
1236 }
1237 else if (TREE_CODE (*tp) == SSA_NAME)
1238 {
1239 *tp = remap_ssa_name (*tp, id);
1240 *walk_subtrees = 0;
1241 return NULL;
1242 }
1243
1244 /* Local variables and labels need to be replaced by equivalent
1245 variables. We don't want to copy static variables; there's only
1246 one of those, no matter how many times we inline the containing
1247 function. Similarly for globals from an outer function. */
1248 else if (auto_var_in_fn_p (*tp, fn))
1249 {
1250 tree new_decl;
1251
1252 /* Remap the declaration. */
1253 new_decl = remap_decl (*tp, id);
1254 gcc_assert (new_decl);
1255 /* Replace this variable with the copy. */
1256 STRIP_TYPE_NOPS (new_decl);
1257 *tp = new_decl;
1258 *walk_subtrees = 0;
1259 }
1260 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1261 copy_statement_list (tp);
1262 else if (TREE_CODE (*tp) == SAVE_EXPR
1263 || TREE_CODE (*tp) == TARGET_EXPR)
1264 remap_save_expr (tp, id->decl_map, walk_subtrees);
1265 else if (TREE_CODE (*tp) == LABEL_DECL
1266 && (! DECL_CONTEXT (*tp)
1267 || decl_function_context (*tp) == id->src_fn))
1268 /* These may need to be remapped for EH handling. */
1269 *tp = remap_decl (*tp, id);
1270 else if (TREE_CODE (*tp) == BIND_EXPR)
1271 copy_bind_expr (tp, walk_subtrees, id);
1272 /* Types may need remapping as well. */
1273 else if (TYPE_P (*tp))
1274 *tp = remap_type (*tp, id);
1275
1276 /* If this is a constant, we have to copy the node iff the type will be
1277 remapped. copy_tree_r will not copy a constant. */
1278 else if (CONSTANT_CLASS_P (*tp))
1279 {
1280 tree new_type = remap_type (TREE_TYPE (*tp), id);
1281
1282 if (new_type == TREE_TYPE (*tp))
1283 *walk_subtrees = 0;
1284
1285 else if (TREE_CODE (*tp) == INTEGER_CST)
1286 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1287 else
1288 {
1289 *tp = copy_node (*tp);
1290 TREE_TYPE (*tp) = new_type;
1291 }
1292 }
1293
1294 /* Otherwise, just copy the node. Note that copy_tree_r already
1295 knows not to copy VAR_DECLs, etc., so this is safe. */
1296 else
1297 {
1298 /* Here we handle trees that are not completely rewritten.
1299 First we detect some inlining-induced bogosities for
1300 discarding. */
1301 if (TREE_CODE (*tp) == MODIFY_EXPR
1302 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1303 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1304 {
1305 /* Some assignments VAR = VAR; don't generate any rtl code
1306 and thus don't count as variable modification. Avoid
1307 keeping bogosities like 0 = 0. */
1308 tree decl = TREE_OPERAND (*tp, 0), value;
1309 tree *n;
1310
1311 n = id->decl_map->get (decl);
1312 if (n)
1313 {
1314 value = *n;
1315 STRIP_TYPE_NOPS (value);
1316 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1317 {
1318 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1319 return copy_tree_body_r (tp, walk_subtrees, data);
1320 }
1321 }
1322 }
1323 else if (TREE_CODE (*tp) == INDIRECT_REF)
1324 {
1325 /* Get rid of *& from inline substitutions that can happen when a
1326 pointer argument is an ADDR_EXPR. */
1327 tree decl = TREE_OPERAND (*tp, 0);
1328 tree *n = id->decl_map->get (decl);
1329 if (n)
1330 {
1331 /* If we happen to get an ADDR_EXPR in n->value, strip
1332 it manually here as we'll eventually get ADDR_EXPRs
1333 which lie about their types pointed to. In this case
1334 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1335 but we absolutely rely on that. As fold_indirect_ref
1336 does other useful transformations, try that first, though. */
1337 tree type = TREE_TYPE (*tp);
1338 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1339 tree old = *tp;
1340 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1341 if (! *tp)
1342 {
1343 type = remap_type (type, id);
1344 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1345 {
1346 *tp
1347 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1348 /* ??? We should either assert here or build
1349 a VIEW_CONVERT_EXPR instead of blindly leaking
1350 incompatible types to our IL. */
1351 if (! *tp)
1352 *tp = TREE_OPERAND (ptr, 0);
1353 }
1354 else
1355 {
1356 *tp = build1 (INDIRECT_REF, type, ptr);
1357 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1358 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1359 TREE_READONLY (*tp) = TREE_READONLY (old);
1360 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1361 have remapped a parameter as the property might be
1362 valid only for the parameter itself. */
1363 if (TREE_THIS_NOTRAP (old)
1364 && (!is_parm (TREE_OPERAND (old, 0))
1365 || (!id->transform_parameter && is_parm (ptr))))
1366 TREE_THIS_NOTRAP (*tp) = 1;
1367 }
1368 }
1369 *walk_subtrees = 0;
1370 return NULL;
1371 }
1372 }
1373 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1374 {
1375 /* We need to re-canonicalize MEM_REFs from inline substitutions
1376 that can happen when a pointer argument is an ADDR_EXPR.
1377 Recurse here manually to allow that. */
1378 tree ptr = TREE_OPERAND (*tp, 0);
1379 tree type = remap_type (TREE_TYPE (*tp), id);
1380 tree old = *tp;
1381 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1382 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1383 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1384 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1385 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1386 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1387 {
1388 MR_DEPENDENCE_CLIQUE (*tp)
1389 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1390 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1391 }
1392 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1393 remapped a parameter as the property might be valid only
1394 for the parameter itself. */
1395 if (TREE_THIS_NOTRAP (old)
1396 && (!is_parm (TREE_OPERAND (old, 0))
1397 || (!id->transform_parameter && is_parm (ptr))))
1398 TREE_THIS_NOTRAP (*tp) = 1;
1399 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1400 *walk_subtrees = 0;
1401 return NULL;
1402 }
1403
1404 /* Here is the "usual case". Copy this tree node, and then
1405 tweak some special cases. */
1406 copy_tree_r (tp, walk_subtrees, NULL);
1407
1408 /* If EXPR has a block defined, map it to the newly constructed block.
1409 When inlining, we want EXPRs without a block to appear in the block
1410 of the function call if we are not remapping a type. */
1411 if (EXPR_P (*tp))
1412 {
1413 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1414 if (TREE_BLOCK (*tp))
1415 {
1416 tree *n;
1417 n = id->decl_map->get (TREE_BLOCK (*tp));
1418 if (n)
1419 new_block = *n;
1420 }
1421 TREE_SET_BLOCK (*tp, new_block);
1422 }
1423
1424 if (TREE_CODE (*tp) != OMP_CLAUSE)
1425 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1426
1427 /* The copied TARGET_EXPR has never been expanded, even if the
1428 original node was expanded already. */
1429 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1430 {
1431 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1432 TREE_OPERAND (*tp, 3) = NULL_TREE;
1433 }
1434
1435 /* Variable substitution need not be simple. In particular, the
1436 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1437 and friends are up-to-date. */
1438 else if (TREE_CODE (*tp) == ADDR_EXPR)
1439 {
1440 int invariant = is_gimple_min_invariant (*tp);
1441 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1442
1443 /* Handle the case where we substituted an INDIRECT_REF
1444 into the operand of the ADDR_EXPR. */
1445 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1446 && !id->do_not_fold)
1447 {
1448 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1449 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1450 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1451 *tp = t;
1452 }
1453 else
1454 recompute_tree_invariant_for_addr_expr (*tp);
1455
1456 /* If this used to be invariant, but is not any longer,
1457 then regimplification is probably needed. */
1458 if (invariant && !is_gimple_min_invariant (*tp))
1459 id->regimplify = true;
1460
1461 *walk_subtrees = 0;
1462 }
1463 }
1464
1465 /* Keep iterating. */
1466 return NULL_TREE;
1467 }
1468
1469 /* Helper for remap_gimple_stmt. Given an EH region number for the
1470 source function, map that to the duplicate EH region number in
1471 the destination function. */
1472
1473 static int
1474 remap_eh_region_nr (int old_nr, copy_body_data *id)
1475 {
1476 eh_region old_r, new_r;
1477
1478 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1479 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1480
1481 return new_r->index;
1482 }
1483
1484 /* Similar, but operate on INTEGER_CSTs. */
1485
1486 static tree
1487 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1488 {
1489 int old_nr, new_nr;
1490
1491 old_nr = tree_to_shwi (old_t_nr);
1492 new_nr = remap_eh_region_nr (old_nr, id);
1493
1494 return build_int_cst (integer_type_node, new_nr);
1495 }
1496
1497 /* Helper for copy_bb. Remap statement STMT using the inlining
1498 information in ID. Return the new statement copy. */
1499
1500 static gimple_seq
1501 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1502 {
1503 gimple *copy = NULL;
1504 struct walk_stmt_info wi;
1505 bool skip_first = false;
1506 gimple_seq stmts = NULL;
1507
1508 if (is_gimple_debug (stmt)
1509 && (gimple_debug_nonbind_marker_p (stmt)
1510 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1511 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1512 return NULL;
1513
1514 /* Begin by recognizing trees that we'll completely rewrite for the
1515 inlining context. Our output for these trees is completely
1516 different from our input (e.g. RETURN_EXPR is deleted and morphs
1517 into an edge). Further down, we'll handle trees that get
1518 duplicated and/or tweaked. */
1519
1520 /* When requested, GIMPLE_RETURN should be transformed to just the
1521 contained GIMPLE_ASSIGN. The branch semantics of the return will
1522 be handled elsewhere by manipulating the CFG rather than the
1523 statement. */
1524 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1525 {
1526 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1527
1528 /* If we're returning something, just turn that into an
1529 assignment to the equivalent of the original RESULT_DECL.
1530 If RETVAL is just the result decl, the result decl has
1531 already been set (e.g. a recent "foo (&result_decl, ...)");
1532 just toss the entire GIMPLE_RETURN. */
1533 if (retval
1534 && (TREE_CODE (retval) != RESULT_DECL
1535 && (TREE_CODE (retval) != SSA_NAME
1536 || ! SSA_NAME_VAR (retval)
1537 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1538 {
1539 copy = gimple_build_assign (id->do_not_unshare
1540 ? id->retvar : unshare_expr (id->retvar),
1541 retval);
1542 /* id->retvar is already substituted. Skip it on later remapping. */
1543 skip_first = true;
1544 }
1545 else
1546 return NULL;
1547 }
1548 else if (gimple_has_substatements (stmt))
1549 {
1550 gimple_seq s1, s2;
1551
1552 /* When cloning bodies from the C++ front end, we will be handed bodies
1553 in High GIMPLE form. Handle here all the High GIMPLE statements that
1554 have embedded statements. */
1555 switch (gimple_code (stmt))
1556 {
1557 case GIMPLE_BIND:
1558 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1559 break;
1560
1561 case GIMPLE_CATCH:
1562 {
1563 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1564 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1565 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1566 }
1567 break;
1568
1569 case GIMPLE_EH_FILTER:
1570 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1571 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1572 break;
1573
1574 case GIMPLE_TRY:
1575 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1576 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1577 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1578 break;
1579
1580 case GIMPLE_WITH_CLEANUP_EXPR:
1581 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1582 copy = gimple_build_wce (s1);
1583 break;
1584
1585 case GIMPLE_OMP_PARALLEL:
1586 {
1587 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1588 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1589 copy = gimple_build_omp_parallel
1590 (s1,
1591 gimple_omp_parallel_clauses (omp_par_stmt),
1592 gimple_omp_parallel_child_fn (omp_par_stmt),
1593 gimple_omp_parallel_data_arg (omp_par_stmt));
1594 }
1595 break;
1596
1597 case GIMPLE_OMP_TASK:
1598 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1599 copy = gimple_build_omp_task
1600 (s1,
1601 gimple_omp_task_clauses (stmt),
1602 gimple_omp_task_child_fn (stmt),
1603 gimple_omp_task_data_arg (stmt),
1604 gimple_omp_task_copy_fn (stmt),
1605 gimple_omp_task_arg_size (stmt),
1606 gimple_omp_task_arg_align (stmt));
1607 break;
1608
1609 case GIMPLE_OMP_FOR:
1610 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1611 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1612 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1613 gimple_omp_for_clauses (stmt),
1614 gimple_omp_for_collapse (stmt), s2);
1615 {
1616 size_t i;
1617 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1618 {
1619 gimple_omp_for_set_index (copy, i,
1620 gimple_omp_for_index (stmt, i));
1621 gimple_omp_for_set_initial (copy, i,
1622 gimple_omp_for_initial (stmt, i));
1623 gimple_omp_for_set_final (copy, i,
1624 gimple_omp_for_final (stmt, i));
1625 gimple_omp_for_set_incr (copy, i,
1626 gimple_omp_for_incr (stmt, i));
1627 gimple_omp_for_set_cond (copy, i,
1628 gimple_omp_for_cond (stmt, i));
1629 }
1630 }
1631 break;
1632
1633 case GIMPLE_OMP_MASTER:
1634 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1635 copy = gimple_build_omp_master (s1);
1636 break;
1637
1638 case GIMPLE_OMP_TASKGROUP:
1639 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1640 copy = gimple_build_omp_taskgroup
1641 (s1, gimple_omp_taskgroup_clauses (stmt));
1642 break;
1643
1644 case GIMPLE_OMP_ORDERED:
1645 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1646 copy = gimple_build_omp_ordered
1647 (s1,
1648 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1649 break;
1650
1651 case GIMPLE_OMP_SCAN:
1652 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 copy = gimple_build_omp_scan
1654 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1655 break;
1656
1657 case GIMPLE_OMP_SECTION:
1658 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1659 copy = gimple_build_omp_section (s1);
1660 break;
1661
1662 case GIMPLE_OMP_SECTIONS:
1663 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1664 copy = gimple_build_omp_sections
1665 (s1, gimple_omp_sections_clauses (stmt));
1666 break;
1667
1668 case GIMPLE_OMP_SINGLE:
1669 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1670 copy = gimple_build_omp_single
1671 (s1, gimple_omp_single_clauses (stmt));
1672 break;
1673
1674 case GIMPLE_OMP_TARGET:
1675 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1676 copy = gimple_build_omp_target
1677 (s1, gimple_omp_target_kind (stmt),
1678 gimple_omp_target_clauses (stmt));
1679 break;
1680
1681 case GIMPLE_OMP_TEAMS:
1682 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1683 copy = gimple_build_omp_teams
1684 (s1, gimple_omp_teams_clauses (stmt));
1685 break;
1686
1687 case GIMPLE_OMP_CRITICAL:
1688 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1689 copy = gimple_build_omp_critical (s1,
1690 gimple_omp_critical_name
1691 (as_a <gomp_critical *> (stmt)),
1692 gimple_omp_critical_clauses
1693 (as_a <gomp_critical *> (stmt)));
1694 break;
1695
1696 case GIMPLE_TRANSACTION:
1697 {
1698 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1699 gtransaction *new_trans_stmt;
1700 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1701 id);
1702 copy = new_trans_stmt = gimple_build_transaction (s1);
1703 gimple_transaction_set_subcode (new_trans_stmt,
1704 gimple_transaction_subcode (old_trans_stmt));
1705 gimple_transaction_set_label_norm (new_trans_stmt,
1706 gimple_transaction_label_norm (old_trans_stmt));
1707 gimple_transaction_set_label_uninst (new_trans_stmt,
1708 gimple_transaction_label_uninst (old_trans_stmt));
1709 gimple_transaction_set_label_over (new_trans_stmt,
1710 gimple_transaction_label_over (old_trans_stmt));
1711 }
1712 break;
1713
1714 default:
1715 gcc_unreachable ();
1716 }
1717 }
1718 else
1719 {
1720 if (gimple_assign_copy_p (stmt)
1721 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1722 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1723 {
1724 /* Here we handle statements that are not completely rewritten.
1725 First we detect some inlining-induced bogosities for
1726 discarding. */
1727
1728 /* Some assignments VAR = VAR; don't generate any rtl code
1729 and thus don't count as variable modification. Avoid
1730 keeping bogosities like 0 = 0. */
1731 tree decl = gimple_assign_lhs (stmt), value;
1732 tree *n;
1733
1734 n = id->decl_map->get (decl);
1735 if (n)
1736 {
1737 value = *n;
1738 STRIP_TYPE_NOPS (value);
1739 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1740 return NULL;
1741 }
1742 }
1743
1744 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1745 in a block that we aren't copying during tree_function_versioning,
1746 just drop the clobber stmt.  */
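/* (Since the defining statement of such a ptr_N is not copied, keeping
   the clobber would leave a reference to an SSA name that has no
   definition in the copy.)  */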
1747 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1748 {
1749 tree lhs = gimple_assign_lhs (stmt);
1750 if (TREE_CODE (lhs) == MEM_REF
1751 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1752 {
1753 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1754 if (gimple_bb (def_stmt)
1755 && !bitmap_bit_p (id->blocks_to_copy,
1756 gimple_bb (def_stmt)->index))
1757 return NULL;
1758 }
1759 }
1760
1761 /* We do not allow CLOBBERs of handled components.  In case the
1762 returned value is stored via such a handled component, remove
1763 the clobber so the stmt verifier is happy.  */
1764 if (gimple_clobber_p (stmt)
1765 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1766 {
1767 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1768 if (!DECL_P (remapped)
1769 && TREE_CODE (remapped) != MEM_REF)
1770 return NULL;
1771 }
1772
1773 if (gimple_debug_bind_p (stmt))
1774 {
1775 gdebug *copy
1776 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1777 gimple_debug_bind_get_value (stmt),
1778 stmt);
1779 if (id->reset_location)
1780 gimple_set_location (copy, input_location);
1781 id->debug_stmts.safe_push (copy);
1782 gimple_seq_add_stmt (&stmts, copy);
1783 return stmts;
1784 }
1785 if (gimple_debug_source_bind_p (stmt))
1786 {
1787 gdebug *copy = gimple_build_debug_source_bind
1788 (gimple_debug_source_bind_get_var (stmt),
1789 gimple_debug_source_bind_get_value (stmt),
1790 stmt);
1791 if (id->reset_location)
1792 gimple_set_location (copy, input_location);
1793 id->debug_stmts.safe_push (copy);
1794 gimple_seq_add_stmt (&stmts, copy);
1795 return stmts;
1796 }
1797 if (gimple_debug_nonbind_marker_p (stmt))
1798 {
1799 /* If the inlined function has too many debug markers,
1800 don't copy them. */
1801 if (id->src_cfun->debug_marker_count
1802 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1803 return stmts;
1804
1805 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1806 if (id->reset_location)
1807 gimple_set_location (copy, input_location);
1808 id->debug_stmts.safe_push (copy);
1809 gimple_seq_add_stmt (&stmts, copy);
1810 return stmts;
1811 }
1812
1813 /* Create a new deep copy of the statement. */
1814 copy = gimple_copy (stmt);
1815
1816 /* Clear flags that need revisiting. */
1817 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1818 {
1819 if (gimple_call_tail_p (call_stmt))
1820 gimple_call_set_tail (call_stmt, false);
1821 if (gimple_call_from_thunk_p (call_stmt))
1822 gimple_call_set_from_thunk (call_stmt, false);
1823 if (gimple_call_internal_p (call_stmt))
1824 switch (gimple_call_internal_fn (call_stmt))
1825 {
1826 case IFN_GOMP_SIMD_LANE:
1827 case IFN_GOMP_SIMD_VF:
1828 case IFN_GOMP_SIMD_LAST_LANE:
1829 case IFN_GOMP_SIMD_ORDERED_START:
1830 case IFN_GOMP_SIMD_ORDERED_END:
1831 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1832 break;
1833 default:
1834 break;
1835 }
1836 }
1837
1838 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1839 RESX and EH_DISPATCH. */
1840 if (id->eh_map)
1841 switch (gimple_code (copy))
1842 {
1843 case GIMPLE_CALL:
1844 {
1845 tree r, fndecl = gimple_call_fndecl (copy);
1846 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1847 switch (DECL_FUNCTION_CODE (fndecl))
1848 {
1849 case BUILT_IN_EH_COPY_VALUES:
1850 r = gimple_call_arg (copy, 1);
1851 r = remap_eh_region_tree_nr (r, id);
1852 gimple_call_set_arg (copy, 1, r);
1853 /* FALLTHRU */
1854
1855 case BUILT_IN_EH_POINTER:
1856 case BUILT_IN_EH_FILTER:
1857 r = gimple_call_arg (copy, 0);
1858 r = remap_eh_region_tree_nr (r, id);
1859 gimple_call_set_arg (copy, 0, r);
1860 break;
1861
1862 default:
1863 break;
1864 }
1865
1866 /* Reset alias info if we didn't apply measures to
1867 keep it valid over inlining by setting DECL_PT_UID. */
1868 if (!id->src_cfun->gimple_df
1869 || !id->src_cfun->gimple_df->ipa_pta)
1870 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1871 }
1872 break;
1873
1874 case GIMPLE_RESX:
1875 {
1876 gresx *resx_stmt = as_a <gresx *> (copy);
1877 int r = gimple_resx_region (resx_stmt);
1878 r = remap_eh_region_nr (r, id);
1879 gimple_resx_set_region (resx_stmt, r);
1880 }
1881 break;
1882
1883 case GIMPLE_EH_DISPATCH:
1884 {
1885 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1886 int r = gimple_eh_dispatch_region (eh_dispatch);
1887 r = remap_eh_region_nr (r, id);
1888 gimple_eh_dispatch_set_region (eh_dispatch, r);
1889 }
1890 break;
1891
1892 default:
1893 break;
1894 }
1895 }
1896
1897 /* If STMT has a block defined, map it to the newly constructed block. */
1898 if (tree block = gimple_block (copy))
1899 {
1900 tree *n;
1901 n = id->decl_map->get (block);
1902 gcc_assert (n);
1903 gimple_set_block (copy, *n);
1904 }
1905
1906 if (id->reset_location)
1907 gimple_set_location (copy, input_location);
1908
1909 /* Debug statements ought to be rebuilt and not copied. */
1910 gcc_checking_assert (!is_gimple_debug (copy));
1911
1912 /* Remap all the operands in COPY. */
1913 memset (&wi, 0, sizeof (wi));
1914 wi.info = id;
1915 if (skip_first)
1916 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1917 else
1918 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1919
1920 /* Clear the copied virtual operands. We are not remapping them here
1921 but are going to recreate them from scratch. */
1922 if (gimple_has_mem_ops (copy))
1923 {
1924 gimple_set_vdef (copy, NULL_TREE);
1925 gimple_set_vuse (copy, NULL_TREE);
1926 }
1927
1928 gimple_seq_add_stmt (&stmts, copy);
1929 return stmts;
1930 }
1931
1932
1933 /* Copy a basic block, scaling the profile accordingly.  Edges will be
1934 taken care of later.  */
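/* (On the scaling: NUM and DEN, as passed from copy_cfg_body, are the count
   of the block the callee's entry block maps to and the entry count of the
   source function, so each copied block keeps its original count scaled by
   NUM/DEN.)  */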
1935
1936 static basic_block
1937 copy_bb (copy_body_data *id, basic_block bb,
1938 profile_count num, profile_count den)
1939 {
1940 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1941 basic_block copy_basic_block;
1942 tree decl;
1943 basic_block prev;
1944
1945 profile_count::adjust_for_ipa_scaling (&num, &den);
1946
1947 /* Search for previous copied basic block. */
1948 prev = bb->prev_bb;
1949 while (!prev->aux)
1950 prev = prev->prev_bb;
1951
1952 /* create_basic_block() will append every new block to
1953 basic_block_info automatically. */
1954 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1955 copy_basic_block->count = bb->count.apply_scale (num, den);
1956
1957 copy_gsi = gsi_start_bb (copy_basic_block);
1958
1959 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1960 {
1961 gimple_seq stmts;
1962 gimple *stmt = gsi_stmt (gsi);
1963 gimple *orig_stmt = stmt;
1964 gimple_stmt_iterator stmts_gsi;
1965 bool stmt_added = false;
1966
1967 id->regimplify = false;
1968 stmts = remap_gimple_stmt (stmt, id);
1969
1970 if (gimple_seq_empty_p (stmts))
1971 continue;
1972
1973 seq_gsi = copy_gsi;
1974
1975 for (stmts_gsi = gsi_start (stmts);
1976 !gsi_end_p (stmts_gsi); )
1977 {
1978 stmt = gsi_stmt (stmts_gsi);
1979
1980 /* Advance iterator now before stmt is moved to seq_gsi. */
1981 gsi_next (&stmts_gsi);
1982
1983 if (gimple_nop_p (stmt))
1984 continue;
1985
1986 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1987 orig_stmt);
1988
1989 /* With return slot optimization we can end up with
1990 non-gimple (foo *)&this->m, fix that here. */
1991 if (is_gimple_assign (stmt)
1992 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1993 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1994 {
1995 tree new_rhs;
1996 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1997 gimple_assign_rhs1 (stmt),
1998 true, NULL, false,
1999 GSI_CONTINUE_LINKING);
2000 gimple_assign_set_rhs1 (stmt, new_rhs);
2001 id->regimplify = false;
2002 }
2003
2004 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2005
2006 if (id->regimplify)
2007 gimple_regimplify_operands (stmt, &seq_gsi);
2008
2009 stmt_added = true;
2010 }
2011
2012 if (!stmt_added)
2013 continue;
2014
2015 /* If copy_basic_block has been empty at the start of this iteration,
2016 call gsi_start_bb again to get at the newly added statements. */
2017 if (gsi_end_p (copy_gsi))
2018 copy_gsi = gsi_start_bb (copy_basic_block);
2019 else
2020 gsi_next (&copy_gsi);
2021
2022 /* Process the new statement.  The call to gimple_regimplify_operands
2023 possibly turned the statement into multiple statements; we
2024 need to process all of them.  */
2025 do
2026 {
2027 tree fn;
2028 gcall *call_stmt;
2029
2030 stmt = gsi_stmt (copy_gsi);
2031 call_stmt = dyn_cast <gcall *> (stmt);
2032 if (call_stmt
2033 && gimple_call_va_arg_pack_p (call_stmt)
2034 && id->call_stmt
2035 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2036 {
2037 /* __builtin_va_arg_pack () should be replaced by
2038 all arguments corresponding to ... in the caller. */
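/* For instance, given
     int f (int x, ...)
     { return g (x, __builtin_va_arg_pack ()); }
   inlining the call f (1, 2, 3) rewrites the call to g as
   g (1, 2, 3): the __builtin_va_arg_pack () argument is replaced by
   the caller's anonymous arguments 2 and 3 (f and g here are purely
   illustrative).  */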
2039 tree p;
2040 gcall *new_call;
2041 vec<tree> argarray;
2042 size_t nargs = gimple_call_num_args (id->call_stmt);
2043 size_t n;
2044
2045 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2046 nargs--;
2047
2048 /* Create the new array of arguments. */
2049 n = nargs + gimple_call_num_args (call_stmt);
2050 argarray.create (n);
2051 argarray.safe_grow_cleared (n);
2052
2053 /* Copy all the arguments before '...' */
2054 memcpy (argarray.address (),
2055 gimple_call_arg_ptr (call_stmt, 0),
2056 gimple_call_num_args (call_stmt) * sizeof (tree));
2057
2058 /* Append the arguments passed in '...' */
2059 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2060 gimple_call_arg_ptr (id->call_stmt, 0)
2061 + (gimple_call_num_args (id->call_stmt) - nargs),
2062 nargs * sizeof (tree));
2063
2064 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2065 argarray);
2066
2067 argarray.release ();
2068
2069 /* Copy all GIMPLE_CALL flags, location and block, except
2070 GF_CALL_VA_ARG_PACK. */
2071 gimple_call_copy_flags (new_call, call_stmt);
2072 gimple_call_set_va_arg_pack (new_call, false);
2073 /* location includes block. */
2074 gimple_set_location (new_call, gimple_location (stmt));
2075 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2076
2077 gsi_replace (&copy_gsi, new_call, false);
2078 stmt = new_call;
2079 }
2080 else if (call_stmt
2081 && id->call_stmt
2082 && (decl = gimple_call_fndecl (stmt))
2083 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2084 {
2085 /* __builtin_va_arg_pack_len () should be replaced by
2086 the number of anonymous arguments. */
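/* E.g. when inlining a call f (1, 2, 3) to a function with a single
   named parameter, the replacement value is the constant 2 (f being
   purely illustrative).  */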
2087 size_t nargs = gimple_call_num_args (id->call_stmt);
2088 tree count, p;
2089 gimple *new_stmt;
2090
2091 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2092 nargs--;
2093
2094 if (!gimple_call_lhs (stmt))
2095 {
2096 /* Drop unused calls. */
2097 gsi_remove (&copy_gsi, false);
2098 continue;
2099 }
2100 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2101 {
2102 count = build_int_cst (integer_type_node, nargs);
2103 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2104 gsi_replace (&copy_gsi, new_stmt, false);
2105 stmt = new_stmt;
2106 }
2107 else if (nargs != 0)
2108 {
2109 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2110 count = build_int_cst (integer_type_node, nargs);
2111 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2112 PLUS_EXPR, newlhs, count);
2113 gimple_call_set_lhs (stmt, newlhs);
2114 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2115 }
2116 }
2117 else if (call_stmt
2118 && id->call_stmt
2119 && gimple_call_internal_p (stmt)
2120 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2121 {
2122 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2123 gsi_remove (&copy_gsi, false);
2124 continue;
2125 }
2126
2127 /* Statements produced by inlining can be unfolded, especially
2128 when we constant propagated some operands.  We can't fold
2129 them right now for two reasons:
2130 1) folding requires SSA_NAME_DEF_STMTs to be correct
2131 2) we can't change function calls to builtins.
2132 So we just mark the statement for later folding.  We mark
2133 all new statements, instead of just the statements that have
2134 changed by some nontrivial substitution, so that even statements
2135 made foldable indirectly are updated.  If this turns out to be
2136 expensive, copy_body can be told to watch for nontrivial
2137 changes.  */
2138 if (id->statements_to_fold)
2139 id->statements_to_fold->add (stmt);
2140
2141 /* We're duplicating a CALL_EXPR. Find any corresponding
2142 callgraph edges and update or duplicate them. */
2143 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2144 {
2145 struct cgraph_edge *edge;
2146
2147 switch (id->transform_call_graph_edges)
2148 {
2149 case CB_CGE_DUPLICATE:
2150 edge = id->src_node->get_edge (orig_stmt);
2151 if (edge)
2152 {
2153 struct cgraph_edge *old_edge = edge;
2154 profile_count old_cnt = edge->count;
2155 edge = edge->clone (id->dst_node, call_stmt,
2156 gimple_uid (stmt),
2157 num, den,
2158 true);
2159
2160 /* Speculative calls consist of two edges - direct and
2161 indirect. Duplicate the whole thing and distribute
2162 frequencies accordingly. */
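/* E.g. if the original direct edge had count 30 and the indirect
   edge count 10, the copied block's count is split 3:1: the cloned
   indirect edge gets 10 / (30 + 10) = 1/4 of it and the cloned
   direct edge the rest.  */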
2163 if (edge->speculative)
2164 {
2165 struct cgraph_edge *direct, *indirect;
2166 struct ipa_ref *ref;
2167
2168 gcc_assert (!edge->indirect_unknown_callee);
2169 old_edge->speculative_call_info (direct, indirect, ref);
2170
2171 profile_count indir_cnt = indirect->count;
2172 indirect = indirect->clone (id->dst_node, call_stmt,
2173 gimple_uid (stmt),
2174 num, den,
2175 true);
2176
2177 profile_probability prob
2178 = indir_cnt.probability_in (old_cnt + indir_cnt);
2179 indirect->count
2180 = copy_basic_block->count.apply_probability (prob);
2181 edge->count = copy_basic_block->count - indirect->count;
2182 id->dst_node->clone_reference (ref, stmt);
2183 }
2184 else
2185 edge->count = copy_basic_block->count;
2186 }
2187 break;
2188
2189 case CB_CGE_MOVE_CLONES:
2190 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2191 call_stmt);
2192 edge = id->dst_node->get_edge (stmt);
2193 break;
2194
2195 case CB_CGE_MOVE:
2196 edge = id->dst_node->get_edge (orig_stmt);
2197 if (edge)
2198 edge->set_call_stmt (call_stmt);
2199 break;
2200
2201 default:
2202 gcc_unreachable ();
2203 }
2204
2205 /* Constant propagation on arguments done during inlining
2206 may create a new direct call.  Produce an edge for it.  */
2207 if ((!edge
2208 || (edge->indirect_inlining_edge
2209 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2210 && id->dst_node->definition
2211 && (fn = gimple_call_fndecl (stmt)) != NULL)
2212 {
2213 struct cgraph_node *dest = cgraph_node::get_create (fn);
2214
2215 /* We have a missing edge in the callgraph.  This can happen
2216 when previous inlining turned an indirect call into a
2217 direct call by constant propagating arguments, or when we
2218 are producing a dead clone (for further cloning).  In all
2219 other cases we hit a bug (incorrect node sharing is the
2220 most common reason for missing edges).  */
2221 gcc_assert (!dest->definition
2222 || dest->address_taken
2223 || !id->src_node->definition
2224 || !id->dst_node->definition);
2225 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2226 id->dst_node->create_edge_including_clones
2227 (dest, orig_stmt, call_stmt, bb->count,
2228 CIF_ORIGINALLY_INDIRECT_CALL);
2229 else
2230 id->dst_node->create_edge (dest, call_stmt,
2231 bb->count)->inline_failed
2232 = CIF_ORIGINALLY_INDIRECT_CALL;
2233 if (dump_file)
2234 {
2235 fprintf (dump_file, "Created new direct edge to %s\n",
2236 dest->name ());
2237 }
2238 }
2239
2240 notice_special_calls (as_a <gcall *> (stmt));
2241 }
2242
2243 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2244 id->eh_map, id->eh_lp_nr);
2245
2246 gsi_next (&copy_gsi);
2247 }
2248 while (!gsi_end_p (copy_gsi));
2249
2250 copy_gsi = gsi_last_bb (copy_basic_block);
2251 }
2252
2253 return copy_basic_block;
2254 }
2255
2256 /* Inserting a Single Entry Multiple Exit region in SSA form into code
2257 already in SSA form is quite easy, since the dominator relationship
2258 for the old basic blocks does not change.
2259 
2260 There is, however, an exception: inlining might change the dominator
2261 relation across EH edges going from basic blocks within the inlined
2262 function to landing pads in the function we inline into.
2263 
2264 The function fills in the PHI_RESULTs of such PHI nodes if they refer
2265 to gimple regs.  Otherwise, it marks the PHI_RESULT of such
2266 PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2267 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2268 set, and this means that there will be no overlapping live ranges
2269 for the underlying symbol.
2270 
2271 This might change in the future if we allow redirecting of EH edges,
2272 and we might then want to change the way we build the CFG pre-inlining
2273 to include all the possible edges.  */
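/* (Concretely, for each PHI node in such a destination block, the argument
   on the edge coming from BB is taken from the argument already present on
   the corresponding edge from RET_BB.)  */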
2274 static void
2275 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2276 bool can_throw, bool nonlocal_goto)
2277 {
2278 edge e;
2279 edge_iterator ei;
2280
2281 FOR_EACH_EDGE (e, ei, bb->succs)
2282 if (!e->dest->aux
2283 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2284 {
2285 gphi *phi;
2286 gphi_iterator si;
2287
2288 if (!nonlocal_goto)
2289 gcc_assert (e->flags & EDGE_EH);
2290
2291 if (!can_throw)
2292 gcc_assert (!(e->flags & EDGE_EH));
2293
2294 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2295 {
2296 edge re;
2297
2298 phi = si.phi ();
2299
2300 /* For abnormal goto/call edges the receiver can be the
2301 ENTRY_BLOCK. Do not assert this cannot happen. */
2302
2303 gcc_assert ((e->flags & EDGE_EH)
2304 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2305
2306 re = find_edge (ret_bb, e->dest);
2307 gcc_checking_assert (re);
2308 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2309 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2310
2311 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2312 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2313 }
2314 }
2315 }
2316
2317 /* Insert clobbers for automatic variables of inlined ID->src_fn
2318 function at the start of basic block ID->eh_landing_pad_dest. */
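/* (Road map of the helper below: the first loop collects the automatic
   variables of ID->src_fn whose copies in ID->dst_fn may need a clobber and
   numbers them in a live_vars_map; compute_live_vars then reports, per
   source block, which of them are live, and a clobber is emitted only for
   variables live on some EH edge entering the landing pad.)  */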
2319
2320 static void
2321 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2322 {
2323 tree var;
2324 basic_block bb = id->eh_landing_pad_dest;
2325 live_vars_map *vars = NULL;
2326 unsigned int cnt = 0;
2327 unsigned int i;
2328 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2329 if (VAR_P (var)
2330 && !DECL_HARD_REGISTER (var)
2331 && !TREE_THIS_VOLATILE (var)
2332 && !DECL_HAS_VALUE_EXPR_P (var)
2333 && !is_gimple_reg (var)
2334 && auto_var_in_fn_p (var, id->src_fn)
2335 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2336 {
2337 tree *t = id->decl_map->get (var);
2338 if (!t)
2339 continue;
2340 tree new_var = *t;
2341 if (VAR_P (new_var)
2342 && !DECL_HARD_REGISTER (new_var)
2343 && !TREE_THIS_VOLATILE (new_var)
2344 && !DECL_HAS_VALUE_EXPR_P (new_var)
2345 && !is_gimple_reg (new_var)
2346 && auto_var_in_fn_p (new_var, id->dst_fn))
2347 {
2348 if (vars == NULL)
2349 vars = new live_vars_map;
2350 vars->put (DECL_UID (var), cnt++);
2351 }
2352 }
2353 if (vars == NULL)
2354 return;
2355
2356 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2357 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2358 if (VAR_P (var))
2359 {
2360 edge e;
2361 edge_iterator ei;
2362 bool needed = false;
2363 unsigned int *v = vars->get (DECL_UID (var));
2364 if (v == NULL)
2365 continue;
2366 FOR_EACH_EDGE (e, ei, bb->preds)
2367 if ((e->flags & EDGE_EH) != 0
2368 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2369 {
2370 basic_block src_bb = (basic_block) e->src->aux;
2371
2372 if (bitmap_bit_p (&live[src_bb->index], *v))
2373 {
2374 needed = true;
2375 break;
2376 }
2377 }
2378 if (needed)
2379 {
2380 tree new_var = *id->decl_map->get (var);
2381 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2382 tree clobber = build_clobber (TREE_TYPE (new_var));
2383 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2384 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2385 }
2386 }
2387 destroy_live_vars (live);
2388 delete vars;
2389 }
2390
2391 /* Copy edges from BB into its copy constructed earlier, scaling the
2392 profile accordingly.  Assume the aux pointers point to the copies
2393 of each BB.  Return true if any debug stmts are left after a
2394 statement that must end the basic block.  */
2395
2396 static bool
2397 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2398 basic_block ret_bb, basic_block abnormal_goto_dest,
2399 copy_body_data *id)
2400 {
2401 basic_block new_bb = (basic_block) bb->aux;
2402 edge_iterator ei;
2403 edge old_edge;
2404 gimple_stmt_iterator si;
2405 bool need_debug_cleanup = false;
2406
2407 /* Use the indices from the original blocks to create edges for the
2408 new ones. */
2409 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2410 if (!(old_edge->flags & EDGE_EH))
2411 {
2412 edge new_edge;
2413 int flags = old_edge->flags;
2414 location_t locus = old_edge->goto_locus;
2415
2416 /* Return edges do get a FALLTHRU flag when they get inlined. */
2417 if (old_edge->dest->index == EXIT_BLOCK
2418 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2419 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2420 flags |= EDGE_FALLTHRU;
2421
2422 new_edge
2423 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2424 new_edge->probability = old_edge->probability;
2425 if (!id->reset_location)
2426 new_edge->goto_locus = remap_location (locus, id);
2427 }
2428
2429 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2430 return false;
2431
2432 /* When doing function splitting, we must decrease the count of the return
2433 block, which was previously reachable from blocks we did not copy.  */
2434 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2435 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2436 if (old_edge->src->index != ENTRY_BLOCK
2437 && !old_edge->src->aux)
2438 new_bb->count -= old_edge->count ().apply_scale (num, den);
2439
2440 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2441 {
2442 gimple *copy_stmt;
2443 bool can_throw, nonlocal_goto;
2444
2445 copy_stmt = gsi_stmt (si);
2446 if (!is_gimple_debug (copy_stmt))
2447 update_stmt (copy_stmt);
2448
2449 /* Do this before the possible split_block. */
2450 gsi_next (&si);
2451
2452 /* If this tree could throw an exception, there are two
2453 cases where we need to add abnormal edge(s): the
2454 tree wasn't in a region and there is a "current
2455 region" in the caller; or the original tree had
2456 EH edges. In both cases split the block after the tree,
2457 and add abnormal edge(s) as needed; we need both
2458 those from the callee and the caller.
2459 We check whether the copy can throw, because the const
2460 propagation can change an INDIRECT_REF which throws
2461 into a COMPONENT_REF which doesn't. If the copy
2462 can throw, the original could also throw. */
2463 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2464 nonlocal_goto
2465 = (stmt_can_make_abnormal_goto (copy_stmt)
2466 && !computed_goto_p (copy_stmt));
2467
2468 if (can_throw || nonlocal_goto)
2469 {
2470 if (!gsi_end_p (si))
2471 {
2472 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2473 gsi_next (&si);
2474 if (gsi_end_p (si))
2475 need_debug_cleanup = true;
2476 }
2477 if (!gsi_end_p (si))
2478 /* Note that bb's predecessor edges aren't necessarily
2479 right at this point; split_block doesn't care. */
2480 {
2481 edge e = split_block (new_bb, copy_stmt);
2482
2483 new_bb = e->dest;
2484 new_bb->aux = e->src->aux;
2485 si = gsi_start_bb (new_bb);
2486 }
2487 }
2488
2489 bool update_probs = false;
2490
2491 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2492 {
2493 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2494 update_probs = true;
2495 }
2496 else if (can_throw)
2497 {
2498 make_eh_edges (copy_stmt);
2499 update_probs = true;
2500 }
2501
2502 /* EH edges may not match old edges. Copy as much as possible. */
2503 if (update_probs)
2504 {
2505 edge e;
2506 edge_iterator ei;
2507 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2508
2509 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2510 if ((old_edge->flags & EDGE_EH)
2511 && (e = find_edge (copy_stmt_bb,
2512 (basic_block) old_edge->dest->aux))
2513 && (e->flags & EDGE_EH))
2514 e->probability = old_edge->probability;
2515
2516 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2517 if (e->flags & EDGE_EH)
2518 {
2519 if (!e->probability.initialized_p ())
2520 e->probability = profile_probability::never ();
2521 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2522 {
2523 if (id->eh_landing_pad_dest == NULL)
2524 id->eh_landing_pad_dest = e->dest;
2525 else
2526 gcc_assert (id->eh_landing_pad_dest == e->dest);
2527 }
2528 }
2529 }
2530
2531
2532 /* If the call we inline cannot make an abnormal goto, do not add
2533 additional abnormal edges; only retain those already present
2534 in the original function body.  */
2535 if (abnormal_goto_dest == NULL)
2536 nonlocal_goto = false;
2537 if (nonlocal_goto)
2538 {
2539 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2540
2541 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2542 nonlocal_goto = false;
2543 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2544 in OpenMP regions which aren't allowed to be left abnormally.
2545 So, no need to add an abnormal edge in that case.  */
2546 else if (is_gimple_call (copy_stmt)
2547 && gimple_call_internal_p (copy_stmt)
2548 && (gimple_call_internal_fn (copy_stmt)
2549 == IFN_ABNORMAL_DISPATCHER)
2550 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2551 nonlocal_goto = false;
2552 else
2553 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2554 EDGE_ABNORMAL);
2555 }
2556
2557 if ((can_throw || nonlocal_goto)
2558 && gimple_in_ssa_p (cfun))
2559 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2560 can_throw, nonlocal_goto);
2561 }
2562 return need_debug_cleanup;
2563 }
2564
2565 /* Copy the PHIs.  All blocks and edges are copied, some blocks
2566 were possibly split and new outgoing EH edges inserted.
2567 BB points to the block of the original function and AUX pointers link
2568 the original and newly copied blocks.  */
2569
2570 static void
2571 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2572 {
2573 basic_block const new_bb = (basic_block) bb->aux;
2574 edge_iterator ei;
2575 gphi *phi;
2576 gphi_iterator si;
2577 edge new_edge;
2578 bool inserted = false;
2579
2580 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2581 {
2582 tree res, new_res;
2583 gphi *new_phi;
2584
2585 phi = si.phi ();
2586 res = PHI_RESULT (phi);
2587 new_res = res;
2588 if (!virtual_operand_p (res))
2589 {
2590 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2591 if (EDGE_COUNT (new_bb->preds) == 0)
2592 {
2593 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2594 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2595 }
2596 else
2597 {
2598 new_phi = create_phi_node (new_res, new_bb);
2599 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2600 {
2601 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2602 bb);
2603 tree arg;
2604 tree new_arg;
2605 edge_iterator ei2;
2606 location_t locus;
2607
2608 /* When doing partial cloning, we allow PHIs on the entry
2609 block as long as all the arguments are the same.
2610 Find any input edge to see the argument to copy.  */
2611 if (!old_edge)
2612 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2613 if (!old_edge->src->aux)
2614 break;
2615
2616 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2617 new_arg = arg;
2618 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2619 gcc_assert (new_arg);
2620 /* With return slot optimization we can end up with
2621 non-gimple (foo *)&this->m, fix that here. */
2622 if (TREE_CODE (new_arg) != SSA_NAME
2623 && TREE_CODE (new_arg) != FUNCTION_DECL
2624 && !is_gimple_val (new_arg))
2625 {
2626 gimple_seq stmts = NULL;
2627 new_arg = force_gimple_operand (new_arg, &stmts, true,
2628 NULL);
2629 gsi_insert_seq_on_edge (new_edge, stmts);
2630 inserted = true;
2631 }
2632 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2633 if (id->reset_location)
2634 locus = input_location;
2635 else
2636 locus = remap_location (locus, id);
2637 add_phi_arg (new_phi, new_arg, new_edge, locus);
2638 }
2639 }
2640 }
2641 }
2642
2643 /* Commit the delayed edge insertions. */
2644 if (inserted)
2645 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2646 gsi_commit_one_edge_insert (new_edge, NULL);
2647 }
2648
2649
2650 /* Wrapper for remap_decl so it can be used as a callback. */
2651
2652 static tree
2653 remap_decl_1 (tree decl, void *data)
2654 {
2655 return remap_decl (decl, (copy_body_data *) data);
2656 }
2657
2658 /* Build struct function and associated datastructures for the new clone
2659 NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2660 changes cfun to the function of new_fndecl (and current_function_decl too). */
2661
2662 static void
2663 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2664 {
2665 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2666
2667 if (!DECL_ARGUMENTS (new_fndecl))
2668 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2669 if (!DECL_RESULT (new_fndecl))
2670 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2671
2672 /* Register specific tree functions. */
2673 gimple_register_cfg_hooks ();
2674
2675 /* Get clean struct function. */
2676 push_struct_function (new_fndecl);
2677
2678 /* We will rebuild these, so just sanity check that they are empty. */
2679 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2680 gcc_assert (cfun->local_decls == NULL);
2681 gcc_assert (cfun->cfg == NULL);
2682 gcc_assert (cfun->decl == new_fndecl);
2683
2684 /* Copy items we preserve during cloning. */
2685 cfun->static_chain_decl = src_cfun->static_chain_decl;
2686 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2687 cfun->function_end_locus = src_cfun->function_end_locus;
2688 cfun->curr_properties = src_cfun->curr_properties;
2689 cfun->last_verified = src_cfun->last_verified;
2690 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2691 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2692 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2693 cfun->calls_eh_return = src_cfun->calls_eh_return;
2694 cfun->stdarg = src_cfun->stdarg;
2695 cfun->after_inlining = src_cfun->after_inlining;
2696 cfun->can_throw_non_call_exceptions
2697 = src_cfun->can_throw_non_call_exceptions;
2698 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2699 cfun->returns_struct = src_cfun->returns_struct;
2700 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2701
2702 init_empty_tree_cfg ();
2703
2704 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2705
2706 profile_count num = count;
2707 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2708 profile_count::adjust_for_ipa_scaling (&num, &den);
2709
2710 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2711 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2712 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2713 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2714 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2715 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2716 if (src_cfun->eh)
2717 init_eh_for_function ();
2718
2719 if (src_cfun->gimple_df)
2720 {
2721 init_tree_ssa (cfun);
2722 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2723 if (cfun->gimple_df->in_ssa_p)
2724 init_ssa_operands (cfun);
2725 }
2726 }
2727
2728 /* Helper function for copy_cfg_body. Move debug stmts from the end
2729 of NEW_BB to the beginning of successor basic blocks when needed. If the
2730 successor has multiple predecessors, reset them, otherwise keep
2731 their value. */
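/* (The need for this arises in copy_edges_for_bb: when a copied statement
   that can throw or make an abnormal goto is followed only by debug stmts,
   the block is not split after it, so those trailing debug stmts have to be
   moved across the outgoing edges here instead.)  */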
2732
2733 static void
2734 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2735 {
2736 edge e;
2737 edge_iterator ei;
2738 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2739
2740 if (gsi_end_p (si)
2741 || gsi_one_before_end_p (si)
2742 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2743 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2744 return;
2745
2746 FOR_EACH_EDGE (e, ei, new_bb->succs)
2747 {
2748 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2749 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2750 while (is_gimple_debug (gsi_stmt (ssi)))
2751 {
2752 gimple *stmt = gsi_stmt (ssi);
2753 gdebug *new_stmt;
2754 tree var;
2755 tree value;
2756
2757 /* For the last edge move the debug stmts instead of copying
2758 them. */
2759 if (ei_one_before_end_p (ei))
2760 {
2761 si = ssi;
2762 gsi_prev (&ssi);
2763 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2764 {
2765 gimple_debug_bind_reset_value (stmt);
2766 gimple_set_location (stmt, UNKNOWN_LOCATION);
2767 }
2768 gsi_remove (&si, false);
2769 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2770 continue;
2771 }
2772
2773 if (gimple_debug_bind_p (stmt))
2774 {
2775 var = gimple_debug_bind_get_var (stmt);
2776 if (single_pred_p (e->dest))
2777 {
2778 value = gimple_debug_bind_get_value (stmt);
2779 value = unshare_expr (value);
2780 new_stmt = gimple_build_debug_bind (var, value, stmt);
2781 }
2782 else
2783 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2784 }
2785 else if (gimple_debug_source_bind_p (stmt))
2786 {
2787 var = gimple_debug_source_bind_get_var (stmt);
2788 value = gimple_debug_source_bind_get_value (stmt);
2789 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2790 }
2791 else if (gimple_debug_nonbind_marker_p (stmt))
2792 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2793 else
2794 gcc_unreachable ();
2795 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2796 id->debug_stmts.safe_push (new_stmt);
2797 gsi_prev (&ssi);
2798 }
2799 }
2800 }
2801
2802 /* Make a copy of the sub-loops of SRC_PARENT and place them
2803 as sub-loops of DEST_PARENT.  */
2804
2805 static void
2806 copy_loops (copy_body_data *id,
2807 class loop *dest_parent, class loop *src_parent)
2808 {
2809 class loop *src_loop = src_parent->inner;
2810 while (src_loop)
2811 {
2812 if (!id->blocks_to_copy
2813 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2814 {
2815 class loop *dest_loop = alloc_loop ();
2816
2817 /* Assign the new loop its header and latch and associate
2818 those with the new loop. */
2819 dest_loop->header = (basic_block)src_loop->header->aux;
2820 dest_loop->header->loop_father = dest_loop;
2821 if (src_loop->latch != NULL)
2822 {
2823 dest_loop->latch = (basic_block)src_loop->latch->aux;
2824 dest_loop->latch->loop_father = dest_loop;
2825 }
2826
2827 /* Copy loop meta-data. */
2828 copy_loop_info (src_loop, dest_loop);
2829 if (dest_loop->unroll)
2830 cfun->has_unroll = true;
2831 if (dest_loop->force_vectorize)
2832 cfun->has_force_vectorize_loops = true;
2833 if (id->src_cfun->last_clique != 0)
2834 dest_loop->owned_clique
2835 = remap_dependence_clique (id,
2836 src_loop->owned_clique
2837 ? src_loop->owned_clique : 1);
2838
2839 /* Finally place it into the loop array and the loop tree. */
2840 place_new_loop (cfun, dest_loop);
2841 flow_loop_tree_node_add (dest_parent, dest_loop);
2842
2843 if (src_loop->simduid)
2844 {
2845 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2846 cfun->has_simduid_loops = true;
2847 }
2848
2849 /* Recurse. */
2850 copy_loops (id, dest_loop, src_loop);
2851 }
2852 src_loop = src_loop->next;
2853 }
2854 }
2855
2856 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2857
2858 void
2859 redirect_all_calls (copy_body_data * id, basic_block bb)
2860 {
2861 gimple_stmt_iterator si;
2862 gimple *last = last_stmt (bb);
2863 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2864 {
2865 gimple *stmt = gsi_stmt (si);
2866 if (is_gimple_call (stmt))
2867 {
2868 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2869 if (edge)
2870 {
2871 edge->redirect_call_stmt_to_callee ();
2872 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2873 gimple_purge_dead_eh_edges (bb);
2874 }
2875 }
2876 }
2877 }
2878
2879 /* Make a copy of the body of FN so that it can be inserted inline in
2880 another function. Walks FN via CFG, returns new fndecl. */
2881
2882 static tree
2883 copy_cfg_body (copy_body_data * id,
2884 basic_block entry_block_map, basic_block exit_block_map,
2885 basic_block new_entry)
2886 {
2887 tree callee_fndecl = id->src_fn;
2888 /* Original cfun for the callee, doesn't change. */
2889 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2890 struct function *cfun_to_copy;
2891 basic_block bb;
2892 tree new_fndecl = NULL;
2893 bool need_debug_cleanup = false;
2894 int last;
2895 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2896 profile_count num = entry_block_map->count;
2897
2898 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2899
2900 /* Register specific tree functions. */
2901 gimple_register_cfg_hooks ();
2902
2903 /* If we are inlining just a region of the function, make sure to connect
2904 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2905 be part of a loop, we must compute the frequency and probability of
2906 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2907 probabilities of edges incoming from the nonduplicated region.  */
2908 if (new_entry)
2909 {
2910 edge e;
2911 edge_iterator ei;
2912 den = profile_count::zero ();
2913
2914 FOR_EACH_EDGE (e, ei, new_entry->preds)
2915 if (!e->src->aux)
2916 den += e->count ();
2917 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2918 }
2919
2920 profile_count::adjust_for_ipa_scaling (&num, &den);
2921
2922 /* Must have a CFG here at this point. */
2923 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2924 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2925
2926
2927 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2928 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2929 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2930 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2931
2932 /* Duplicate any exception-handling regions. */
2933 if (cfun->eh)
2934 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2935 remap_decl_1, id);
2936
2937 /* Use aux pointers to map the original blocks to their copies.  */
2938 FOR_EACH_BB_FN (bb, cfun_to_copy)
2939 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2940 {
2941 basic_block new_bb = copy_bb (id, bb, num, den);
2942 bb->aux = new_bb;
2943 new_bb->aux = bb;
2944 new_bb->loop_father = entry_block_map->loop_father;
2945 }
2946
2947 last = last_basic_block_for_fn (cfun);
2948
2949 /* Now that we've duplicated the blocks, duplicate their edges. */
2950 basic_block abnormal_goto_dest = NULL;
2951 if (id->call_stmt
2952 && stmt_can_make_abnormal_goto (id->call_stmt))
2953 {
2954 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2955
2956 bb = gimple_bb (id->call_stmt);
2957 gsi_next (&gsi);
2958 if (gsi_end_p (gsi))
2959 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2960 }
2961 FOR_ALL_BB_FN (bb, cfun_to_copy)
2962 if (!id->blocks_to_copy
2963 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2964 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2965 abnormal_goto_dest, id);
2966
2967 if (id->eh_landing_pad_dest)
2968 {
2969 add_clobbers_to_eh_landing_pad (id);
2970 id->eh_landing_pad_dest = NULL;
2971 }
2972
2973 if (new_entry)
2974 {
2975 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2976 EDGE_FALLTHRU);
2977 e->probability = profile_probability::always ();
2978 }
2979
2980 /* Duplicate the loop tree, if available and wanted. */
2981 if (loops_for_fn (src_cfun) != NULL
2982 && current_loops != NULL)
2983 {
2984 copy_loops (id, entry_block_map->loop_father,
2985 get_loop (src_cfun, 0));
2986 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2987 loops_state_set (LOOPS_NEED_FIXUP);
2988 }
2989
2990 /* If the loop tree in the source function needed fixup, mark the
2991 destination loop tree for fixup, too. */
2992 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2993 loops_state_set (LOOPS_NEED_FIXUP);
2994
2995 if (gimple_in_ssa_p (cfun))
2996 FOR_ALL_BB_FN (bb, cfun_to_copy)
2997 if (!id->blocks_to_copy
2998 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2999 copy_phis_for_bb (bb, id);
3000
3001 FOR_ALL_BB_FN (bb, cfun_to_copy)
3002 if (bb->aux)
3003 {
3004 if (need_debug_cleanup
3005 && bb->index != ENTRY_BLOCK
3006 && bb->index != EXIT_BLOCK)
3007 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3008 /* Update call edge destinations. This cannot be done before loop
3009 info is updated, because we may split basic blocks. */
3010 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3011 && bb->index != ENTRY_BLOCK
3012 && bb->index != EXIT_BLOCK)
3013 redirect_all_calls (id, (basic_block)bb->aux);
3014 ((basic_block)bb->aux)->aux = NULL;
3015 bb->aux = NULL;
3016 }
3017
3018 /* Zero out AUX fields of blocks newly created during EH edge
3019 insertion.  */
3020 for (; last < last_basic_block_for_fn (cfun); last++)
3021 {
3022 if (need_debug_cleanup)
3023 maybe_move_debug_stmts_to_successors (id,
3024 BASIC_BLOCK_FOR_FN (cfun, last));
3025 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3026 /* Update call edge destinations. This cannot be done before loop
3027 info is updated, because we may split basic blocks. */
3028 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3029 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3030 }
3031 entry_block_map->aux = NULL;
3032 exit_block_map->aux = NULL;
3033
3034 if (id->eh_map)
3035 {
3036 delete id->eh_map;
3037 id->eh_map = NULL;
3038 }
3039 if (id->dependence_map)
3040 {
3041 delete id->dependence_map;
3042 id->dependence_map = NULL;
3043 }
3044
3045 return new_fndecl;
3046 }
3047
3048 /* Copy the debug STMT using ID. We deal with these statements in a
3049 special way: if any variable in their VALUE expression wasn't
3050 remapped yet, we won't remap it, because that would get decl uids
3051 out of sync, causing codegen differences between -g and -g0. If
3052 this arises, we drop the VALUE expression altogether. */
3053
3054 static void
3055 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3056 {
3057 tree t, *n;
3058 struct walk_stmt_info wi;
3059
3060 if (tree block = gimple_block (stmt))
3061 {
3062 n = id->decl_map->get (block);
3063 gimple_set_block (stmt, n ? *n : id->block);
3064 }
3065
3066 if (gimple_debug_nonbind_marker_p (stmt))
3067 return;
3068
3069 /* Remap all the operands in STMT.  */
3070 memset (&wi, 0, sizeof (wi));
3071 wi.info = id;
3072
3073 processing_debug_stmt = 1;
3074
3075 if (gimple_debug_source_bind_p (stmt))
3076 t = gimple_debug_source_bind_get_var (stmt);
3077 else if (gimple_debug_bind_p (stmt))
3078 t = gimple_debug_bind_get_var (stmt);
3079 else
3080 gcc_unreachable ();
3081
3082 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3083 && (n = id->debug_map->get (t)))
3084 {
3085 gcc_assert (VAR_P (*n));
3086 t = *n;
3087 }
3088 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3089 /* T is a non-localized variable. */;
3090 else
3091 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3092
3093 if (gimple_debug_bind_p (stmt))
3094 {
3095 gimple_debug_bind_set_var (stmt, t);
3096
3097 if (gimple_debug_bind_has_value_p (stmt))
3098 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3099 remap_gimple_op_r, &wi, NULL);
3100
3101 /* Punt if any decl couldn't be remapped. */
3102 if (processing_debug_stmt < 0)
3103 gimple_debug_bind_reset_value (stmt);
3104 }
3105 else if (gimple_debug_source_bind_p (stmt))
3106 {
3107 gimple_debug_source_bind_set_var (stmt, t);
3108 /* When inlining, if the source bind refers to one of the
3109 optimized-away parameters, change the source bind into a normal
3110 debug bind referring to the corresponding DEBUG_EXPR_DECL that
3111 should have been bound before the call stmt.  */
3112 t = gimple_debug_source_bind_get_value (stmt);
3113 if (t != NULL_TREE
3114 && TREE_CODE (t) == PARM_DECL
3115 && id->call_stmt)
3116 {
3117 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3118 unsigned int i;
3119 if (debug_args != NULL)
3120 {
3121 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3122 if ((**debug_args)[i] == DECL_ORIGIN (t)
3123 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3124 {
3125 t = (**debug_args)[i + 1];
3126 stmt->subcode = GIMPLE_DEBUG_BIND;
3127 gimple_debug_bind_set_value (stmt, t);
3128 break;
3129 }
3130 }
3131 }
3132 if (gimple_debug_source_bind_p (stmt))
3133 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3134 remap_gimple_op_r, &wi, NULL);
3135 }
3136
3137 processing_debug_stmt = 0;
3138
3139 update_stmt (stmt);
3140 }
3141
3142 /* Process deferred debug stmts. In order to give values better odds
3143 of being successfully remapped, we delay the processing of debug
3144 stmts until all other stmts that might require remapping are
3145 processed. */
3146
3147 static void
3148 copy_debug_stmts (copy_body_data *id)
3149 {
3150 size_t i;
3151 gdebug *stmt;
3152
3153 if (!id->debug_stmts.exists ())
3154 return;
3155
3156 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3157 copy_debug_stmt (stmt, id);
3158
3159 id->debug_stmts.release ();
3160 }
3161
3162 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3163 another function. */
3164
3165 static tree
3166 copy_tree_body (copy_body_data *id)
3167 {
3168 tree fndecl = id->src_fn;
3169 tree body = DECL_SAVED_TREE (fndecl);
3170
3171 walk_tree (&body, copy_tree_body_r, id, NULL);
3172
3173 return body;
3174 }
3175
3176 /* Make a copy of the body of FN so that it can be inserted inline in
3177 another function. */
3178
3179 static tree
3180 copy_body (copy_body_data *id,
3181 basic_block entry_block_map, basic_block exit_block_map,
3182 basic_block new_entry)
3183 {
3184 tree fndecl = id->src_fn;
3185 tree body;
3186
3187 /* If this body has a CFG, walk CFG and copy. */
3188 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3189 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3190 new_entry);
3191 copy_debug_stmts (id);
3192
3193 return body;
3194 }
3195
3196 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3197 defined in function FN, or of a data member thereof. */
3198
3199 static bool
3200 self_inlining_addr_expr (tree value, tree fn)
3201 {
3202 tree var;
3203
3204 if (TREE_CODE (value) != ADDR_EXPR)
3205 return false;
3206
3207 var = get_base_address (TREE_OPERAND (value, 0));
3208
3209 return var && auto_var_in_fn_p (var, fn);
3210 }
3211
3212 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3213 lexical block and line number information from base_stmt, if given,
3214 or from the last stmt of the block otherwise. */
3215
3216 static gimple *
3217 insert_init_debug_bind (copy_body_data *id,
3218 basic_block bb, tree var, tree value,
3219 gimple *base_stmt)
3220 {
3221 gimple *note;
3222 gimple_stmt_iterator gsi;
3223 tree tracked_var;
3224
3225 if (!gimple_in_ssa_p (id->src_cfun))
3226 return NULL;
3227
3228 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3229 return NULL;
3230
3231 tracked_var = target_for_debug_bind (var);
3232 if (!tracked_var)
3233 return NULL;
3234
3235 if (bb)
3236 {
3237 gsi = gsi_last_bb (bb);
3238 if (!base_stmt && !gsi_end_p (gsi))
3239 base_stmt = gsi_stmt (gsi);
3240 }
3241
3242 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3243
3244 if (bb)
3245 {
3246 if (!gsi_end_p (gsi))
3247 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3248 else
3249 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3250 }
3251
3252 return note;
3253 }
3254
3255 static void
3256 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3257 {
3258 /* If VAR represents a zero-sized variable, it's possible that the
3259 assignment statement may result in no gimple statements. */
3260 if (init_stmt)
3261 {
3262 gimple_stmt_iterator si = gsi_last_bb (bb);
3263
3264 /* We can end up with init statements that store to a non-register
3265 from a rhs with a conversion. Handle that here by forcing the
3266 rhs into a temporary. gimple_regimplify_operands is not
3267 prepared to do this for us. */
3268 if (!is_gimple_debug (init_stmt)
3269 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3270 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3271 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3272 {
3273 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3274 gimple_expr_type (init_stmt),
3275 gimple_assign_rhs1 (init_stmt));
3276 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3277 GSI_NEW_STMT);
3278 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3279 gimple_assign_set_rhs1 (init_stmt, rhs);
3280 }
3281 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3282 gimple_regimplify_operands (init_stmt, &si);
3283
3284 if (!is_gimple_debug (init_stmt))
3285 {
3286 tree def = gimple_assign_lhs (init_stmt);
3287 insert_init_debug_bind (id, bb, def, def, init_stmt);
3288 }
3289 }
3290 }
3291
3292 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3293 at the end of BB.  When BB is NULL, we return the init statement to be
3294 output later.  */
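/* (In outline: P is normally remapped to a fresh VAR_DECL initialized from
   VALUE; when P is read-only and VALUE is a suitable invariant, or when we
   are in SSA form and P's default definition can be mapped directly to an
   SSA name or invariant, the mapping bypasses the temporary and no init
   statement is needed.)  */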
3295 static gimple *
3296 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3297 basic_block bb, tree *vars)
3298 {
3299 gimple *init_stmt = NULL;
3300 tree var;
3301 tree rhs = value;
3302 tree def = (gimple_in_ssa_p (cfun)
3303 ? ssa_default_def (id->src_cfun, p) : NULL);
3304
3305 if (value
3306 && value != error_mark_node
3307 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3308 {
3309 /* If we can match up types by promotion/demotion do so. */
3310 if (fold_convertible_p (TREE_TYPE (p), value))
3311 rhs = fold_convert (TREE_TYPE (p), value);
3312 else
3313 {
3314 /* ??? For valid programs we should not end up here.
3315 Still if we end up with truly mismatched types here, fall back
3316 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3317 GIMPLE to the following passes. */
3318 if (!is_gimple_reg_type (TREE_TYPE (value))
3319 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3320 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3321 else
3322 rhs = build_zero_cst (TREE_TYPE (p));
3323 }
3324 }
3325
3326 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3327 here since the type of this decl must be visible to the calling
3328 function. */
3329 var = copy_decl_to_var (p, id);
3330
3331 /* Declare this new variable. */
3332 DECL_CHAIN (var) = *vars;
3333 *vars = var;
3334
3335 /* Make gimplifier happy about this variable. */
3336 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3337
3338 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3339 we would not need to create a new variable here at all, if it
3340 weren't for debug info.  Still, we can just use the argument
3341 value.  */
3342 if (TREE_READONLY (p)
3343 && !TREE_ADDRESSABLE (p)
3344 && value && !TREE_SIDE_EFFECTS (value)
3345 && !def)
3346 {
3347 /* We may produce non-gimple trees by adding NOPs or introduce
3348 invalid sharing when the operand is not really constant.
3349 It is not a big deal to prohibit constant propagation here, as
3350 we will constant propagate in the DOM1 pass anyway.  */
3351 if (is_gimple_min_invariant (value)
3352 && useless_type_conversion_p (TREE_TYPE (p),
3353 TREE_TYPE (value))
3354 /* We have to be very careful about ADDR_EXPR. Make sure
3355 the base variable isn't a local variable of the inlined
3356 function, e.g., when doing recursive inlining, direct or
3357 mutually-recursive or whatever, which is why we don't
3358 just test whether fn == current_function_decl. */
3359 && ! self_inlining_addr_expr (value, fn))
3360 {
3361 insert_decl_map (id, p, value);
3362 insert_debug_decl_map (id, p, var);
3363 return insert_init_debug_bind (id, bb, var, value, NULL);
3364 }
3365 }
3366
3367 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3368 that way, when the PARM_DECL is encountered, it will be
3369 automatically replaced by the VAR_DECL. */
3370 insert_decl_map (id, p, var);
3371
3372 /* Even if P was TREE_READONLY, the new VAR should not be.
3373 In the original code, we would have constructed a
3374 temporary, and then the function body would have never
3375 changed the value of P. However, now, we will be
3376 constructing VAR directly. The constructor body may
3377 change its value multiple times as it is being
3378 constructed. Therefore, it must not be TREE_READONLY;
3379 the back-end assumes that a TREE_READONLY variable is
3380 assigned to only once.  */
3381 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3382 TREE_READONLY (var) = 0;
3383
3384 /* If there is no setup required and we are in SSA, take the easy route,
3385 replacing all SSA names representing the function parameter by the
3386 SSA name passed to the function.
3387 
3388 We need to construct a map for the variable anyway, as it might be
3389 used in different SSA names when the parameter is set in the function.
3390 
3391 Do the replacement at -O0 for const arguments replaced by a constant.
3392 This is important for builtin_constant_p and other constructs requiring
3393 a constant argument to be visible in the inlined function body.  */
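/* (For instance, with
       static inline int f (const int x)
       { return __builtin_constant_p (x) ? 1 : 0; }
   inlining a call f (42) maps x's default definition directly to 42 even
   at -O0, since the parameter is read-only and the argument invariant, so
   __builtin_constant_p sees a constant in the inlined body.  f is purely
   illustrative.)  */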
3394 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3395 && (optimize
3396 || (TREE_READONLY (p)
3397 && is_gimple_min_invariant (rhs)))
3398 && (TREE_CODE (rhs) == SSA_NAME
3399 || is_gimple_min_invariant (rhs))
3400 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3401 {
3402 insert_decl_map (id, def, rhs);
3403 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3404 }
3405
3406 /* If the value of the argument is never used, don't bother initializing
3407 it.  */
3408 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3409 {
3410 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3411 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3412 }
3413
3414 /* Initialize this VAR_DECL from the equivalent argument. Convert
3415 the argument to the proper type in case it was promoted. */
3416 if (value)
3417 {
3418 if (rhs == error_mark_node)
3419 {
3420 insert_decl_map (id, p, var);
3421 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3422 }
3423
3424 STRIP_USELESS_TYPE_CONVERSION (rhs);
3425
3426 /* If we are in SSA form properly remap the default definition
3427 or assign to a dummy SSA name if the parameter is unused and
3428 we are not optimizing. */
3429 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3430 {
3431 if (def)
3432 {
3433 def = remap_ssa_name (def, id);
3434 init_stmt = gimple_build_assign (def, rhs);
3435 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3436 set_ssa_default_def (cfun, var, NULL);
3437 }
3438 else if (!optimize)
3439 {
3440 def = make_ssa_name (var);
3441 init_stmt = gimple_build_assign (def, rhs);
3442 }
3443 }
3444 else
3445 init_stmt = gimple_build_assign (var, rhs);
3446
3447 if (bb && init_stmt)
3448 insert_init_stmt (id, bb, init_stmt);
3449 }
3450 return init_stmt;
3451 }
3452
3453 /* Generate code to initialize the parameters of the function at the
3454 top of the stack in ID from the GIMPLE_CALL STMT. */
3455
3456 static void
3457 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3458 tree fn, basic_block bb)
3459 {
3460 tree parms;
3461 size_t i;
3462 tree p;
3463 tree vars = NULL_TREE;
3464 tree static_chain = gimple_call_chain (stmt);
3465
3466 /* Figure out what the parameters are. */
3467 parms = DECL_ARGUMENTS (fn);
3468
3469 /* Loop through the parameter declarations, replacing each with an
3470 equivalent VAR_DECL, appropriately initialized. */
3471 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3472 {
3473 tree val;
3474 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3475 setup_one_parameter (id, p, val, fn, bb, &vars);
3476 }
3477 /* After remapping the parameters, remap their types.  This has to be done
3478 in a second loop over all parameters, to appropriately remap
3479 variable-sized arrays whose size is specified in a
3480 parameter following the array.  */
3481 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3482 {
3483 tree *varp = id->decl_map->get (p);
3484 if (varp && VAR_P (*varp))
3485 {
3486 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3487 ? ssa_default_def (id->src_cfun, p) : NULL);
3488 tree var = *varp;
3489 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3490 /* Also remap the default definition if it was remapped
3491 to the default definition of the parameter replacement
3492 by the parameter setup. */
3493 if (def)
3494 {
3495 tree *defp = id->decl_map->get (def);
3496 if (defp
3497 && TREE_CODE (*defp) == SSA_NAME
3498 && SSA_NAME_VAR (*defp) == var)
3499 TREE_TYPE (*defp) = TREE_TYPE (var);
3500 }
3501 }
3502 }
3503
3504 /* Initialize the static chain. */
3505 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3506 gcc_assert (fn != current_function_decl);
3507 if (p)
3508 {
3509 /* No static chain? Seems like a bug in tree-nested.c. */
3510 gcc_assert (static_chain);
3511
3512 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3513 }
3514
3515 declare_inline_vars (id->block, vars);
3516 }
3517
3518
3519 /* Declare a return variable to replace the RESULT_DECL for the
3520 function we are calling. An appropriate DECL_STMT is returned.
3521 The USE_STMT is filled to contain a use of the declaration to
3522 indicate the return value of the function.
3523
3524 RETURN_SLOT, if non-null, is the place where to store the result. It
3525 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3526 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3527
3528 The return value is a (possibly null) value that holds the result
3529 as seen by the caller. */
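/* For illustration (hypothetical caller statements):

     big = callee ();   - RETURN_SLOT is BIG when the call has
			  CALL_EXPR_RETURN_SLOT_OPT set
     x = callee ();     - MODIFY_DEST is X
     callee ();         - both are NULL; the return value is unused.  */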
3530
3531 static tree
3532 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3533 basic_block entry_bb)
3534 {
3535 tree callee = id->src_fn;
3536 tree result = DECL_RESULT (callee);
3537 tree callee_type = TREE_TYPE (result);
3538 tree caller_type;
3539 tree var, use;
3540
3541 /* Handle type-mismatches in the function declaration return type
3542 vs. the call expression. */
3543 if (modify_dest)
3544 caller_type = TREE_TYPE (modify_dest);
3545 else
3546 caller_type = TREE_TYPE (TREE_TYPE (callee));
3547
3548 /* We don't need to do anything for functions that don't return anything. */
3549 if (VOID_TYPE_P (callee_type))
3550 return NULL_TREE;
3551
3552 /* If there was a return slot, then the return value is the
3553 dereferenced address of that object. */
3554 if (return_slot)
3555 {
3556 /* The front end shouldn't have used both return_slot and
3557 a modify expression. */
3558 gcc_assert (!modify_dest);
3559 if (DECL_BY_REFERENCE (result))
3560 {
3561 tree return_slot_addr = build_fold_addr_expr (return_slot);
3562 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3563
3564 /* We are going to construct *&return_slot and we can't do that
3565 for variables believed to be not addressable.
3566
3567 FIXME: This check can possibly trigger, because values returned
3568 via the return slot optimization are not believed to have their
3569 address taken by alias analysis. */
3570 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3571 var = return_slot_addr;
3572 }
3573 else
3574 {
3575 var = return_slot;
3576 gcc_assert (TREE_CODE (var) != SSA_NAME);
3577 if (TREE_ADDRESSABLE (result))
3578 mark_addressable (var);
3579 }
3580 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3581 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3582 && !DECL_GIMPLE_REG_P (result)
3583 && DECL_P (var))
3584 DECL_GIMPLE_REG_P (var) = 0;
3585 use = NULL;
3586 goto done;
3587 }
3588
3589 /* All types requiring non-trivial constructors should have been handled. */
3590 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3591
3592 /* Attempt to avoid creating a new temporary variable. */
3593 if (modify_dest
3594 && TREE_CODE (modify_dest) != SSA_NAME)
3595 {
3596 bool use_it = false;
3597
3598 /* We can't use MODIFY_DEST if there's type promotion involved. */
3599 if (!useless_type_conversion_p (callee_type, caller_type))
3600 use_it = false;
3601
3602 /* ??? If we're assigning to a variable sized type, then we must
3603 reuse the destination variable, because we've no good way to
3604 create variable sized temporaries at this point. */
3605 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3606 use_it = true;
3607
3608 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3609 reuse it as the result of the call directly. Don't do this if
3610 it would promote MODIFY_DEST to addressable. */
3611 else if (TREE_ADDRESSABLE (result))
3612 use_it = false;
3613 else
3614 {
3615 tree base_m = get_base_address (modify_dest);
3616
3617 /* If the base isn't a decl, then it's a pointer, and we don't
3618 know where that's going to go. */
3619 if (!DECL_P (base_m))
3620 use_it = false;
3621 else if (is_global_var (base_m))
3622 use_it = false;
3623 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3624 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3625 && !DECL_GIMPLE_REG_P (result)
3626 && DECL_GIMPLE_REG_P (base_m))
3627 use_it = false;
3628 else if (!TREE_ADDRESSABLE (base_m))
3629 use_it = true;
3630 }
3631
3632 if (use_it)
3633 {
3634 var = modify_dest;
3635 use = NULL;
3636 goto done;
3637 }
3638 }
3639
3640 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3641
3642 var = copy_result_decl_to_var (result, id);
3643 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3644
3645 /* Do not have the rest of GCC warn about this variable as it should
3646 not be visible to the user. */
3647 TREE_NO_WARNING (var) = 1;
3648
3649 declare_inline_vars (id->block, var);
3650
3651 /* Build the use expr. If the return type of the function was
3652 promoted, convert it back to the expected type. */
3653 use = var;
3654 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3655 {
3656 /* If we can match up types by promotion/demotion do so. */
3657 if (fold_convertible_p (caller_type, var))
3658 use = fold_convert (caller_type, var);
3659 else
3660 {
3661 /* ??? For valid programs we should not end up here.
3662 Still, if we end up with truly mismatched types here, fall back
3663 to using a MEM_REF to not leak invalid GIMPLE to the following
3664 passes. */
3665 /* Prevent var from being written into SSA form. */
3666 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3667 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3668 DECL_GIMPLE_REG_P (var) = false;
3669 else if (is_gimple_reg_type (TREE_TYPE (var)))
3670 TREE_ADDRESSABLE (var) = true;
3671 use = fold_build2 (MEM_REF, caller_type,
3672 build_fold_addr_expr (var),
3673 build_int_cst (ptr_type_node, 0));
3674 }
3675 }
3676
3677 STRIP_USELESS_TYPE_CONVERSION (use);
3678
3679 if (DECL_BY_REFERENCE (result))
3680 {
3681 TREE_ADDRESSABLE (var) = 1;
3682 var = build_fold_addr_expr (var);
3683 }
3684
3685 done:
3686 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3687 way, when the RESULT_DECL is encountered, it will be
3688 automatically replaced by the VAR_DECL.
3689
3690 When returning by reference, ensure that RESULT_DECL remaps to
3691 gimple_val. */
3692 if (DECL_BY_REFERENCE (result)
3693 && !is_gimple_val (var))
3694 {
3695 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3696 insert_decl_map (id, result, temp);
3697 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3698 its default_def SSA_NAME. */
3699 if (gimple_in_ssa_p (id->src_cfun)
3700 && is_gimple_reg (result))
3701 {
3702 temp = make_ssa_name (temp);
3703 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3704 }
3705 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3706 }
3707 else
3708 insert_decl_map (id, result, var);
3709
3710 /* Remember this so we can ignore it in remap_decls. */
3711 id->retvar = var;
3712 return use;
3713 }
3714
3715 /* Determine if the function can be copied. If so return NULL. If
3716 not return a string describing the reason for failure. */
3717
3718 const char *
3719 copy_forbidden (struct function *fun)
3720 {
3721 const char *reason = fun->cannot_be_copied_reason;
3722
3723 /* Only examine the function once. */
3724 if (fun->cannot_be_copied_set)
3725 return reason;
3726
3727 /* We cannot copy a function that receives a non-local goto
3728 because we cannot remap the destination label used in the
3729 function that is performing the non-local goto. */
3730 /* ??? Actually, this should be possible, if we work at it.
3731 No doubt there's just a handful of places that simply
3732 assume it doesn't happen and don't substitute properly. */
3733 if (fun->has_nonlocal_label)
3734 {
3735 reason = G_("function %q+F can never be copied "
3736 "because it receives a non-local goto");
3737 goto fail;
3738 }
3739
3740 if (fun->has_forced_label_in_static)
3741 {
3742 reason = G_("function %q+F can never be copied because it saves "
3743 "address of local label in a static variable");
3744 goto fail;
3745 }
3746
3747 fail:
3748 fun->cannot_be_copied_reason = reason;
3749 fun->cannot_be_copied_set = true;
3750 return reason;
3751 }
3752
3753
3754 static const char *inline_forbidden_reason;
3755
3756 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3757 iff a function cannot be inlined. Also sets the reason why. */
3758
3759 static tree
3760 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3761 struct walk_stmt_info *wip)
3762 {
3763 tree fn = (tree) wip->info;
3764 tree t;
3765 gimple *stmt = gsi_stmt (*gsi);
3766
3767 switch (gimple_code (stmt))
3768 {
3769 case GIMPLE_CALL:
3770 /* Refuse to inline an alloca call unless the user explicitly forced it,
3771 as this may change the program's memory overhead drastically when the
3772 function using alloca is called in a loop. In the GCC present in
3773 SPEC2000, inlining into schedule_block caused it to require 2GB of
3774 RAM instead of 256MB. Don't do so for alloca calls emitted for
3775 VLA objects, as those can't cause unbounded growth (they're always
3776 wrapped inside stack_save/stack_restore regions). */
3777 if (gimple_maybe_alloca_call_p (stmt)
3778 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3779 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3780 {
3781 inline_forbidden_reason
3782 = G_("function %q+F can never be inlined because it uses "
3783 "alloca (override using the always_inline attribute)");
3784 *handled_ops_p = true;
3785 return fn;
3786 }
3787
3788 t = gimple_call_fndecl (stmt);
3789 if (t == NULL_TREE)
3790 break;
3791
3792 /* We cannot inline functions that call setjmp. */
3793 if (setjmp_call_p (t))
3794 {
3795 inline_forbidden_reason
3796 = G_("function %q+F can never be inlined because it uses setjmp");
3797 *handled_ops_p = true;
3798 return t;
3799 }
3800
3801 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3802 switch (DECL_FUNCTION_CODE (t))
3803 {
3804 /* We cannot inline functions that take a variable number of
3805 arguments. */
3806 case BUILT_IN_VA_START:
3807 case BUILT_IN_NEXT_ARG:
3808 case BUILT_IN_VA_END:
3809 inline_forbidden_reason
3810 = G_("function %q+F can never be inlined because it "
3811 "uses variable argument lists");
3812 *handled_ops_p = true;
3813 return t;
3814
3815 case BUILT_IN_LONGJMP:
3816 /* We can't inline functions that call __builtin_longjmp at
3817 all. The non-local goto machinery really requires the
3818 destination be in a different function. If we allow the
3819 function calling __builtin_longjmp to be inlined into the
3820 function calling __builtin_setjmp, Things will Go Awry. */
3821 inline_forbidden_reason
3822 = G_("function %q+F can never be inlined because "
3823 "it uses setjmp-longjmp exception handling");
3824 *handled_ops_p = true;
3825 return t;
3826
3827 case BUILT_IN_NONLOCAL_GOTO:
3828 /* Similarly. */
3829 inline_forbidden_reason
3830 = G_("function %q+F can never be inlined because "
3831 "it uses non-local goto");
3832 *handled_ops_p = true;
3833 return t;
3834
3835 case BUILT_IN_RETURN:
3836 case BUILT_IN_APPLY_ARGS:
3837 /* If a __builtin_apply_args caller would be inlined,
3838 it would be saving arguments of the function it has
3839 been inlined into. Similarly, __builtin_return would
3840 return from the function it has been inlined into. */
3841 inline_forbidden_reason
3842 = G_("function %q+F can never be inlined because "
3843 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3844 *handled_ops_p = true;
3845 return t;
3846
3847 default:
3848 break;
3849 }
3850 break;
3851
3852 case GIMPLE_GOTO:
3853 t = gimple_goto_dest (stmt);
3854
3855 /* We will not inline a function which uses computed goto. The
3856 addresses of its local labels, which may be tucked into
3857 global storage, are of course not constant across
3858 instantiations, which causes unexpected behavior. */
3859 if (TREE_CODE (t) != LABEL_DECL)
3860 {
3861 inline_forbidden_reason
3862 = G_("function %q+F can never be inlined "
3863 "because it contains a computed goto");
3864 *handled_ops_p = true;
3865 return t;
3866 }
3867 break;
3868
3869 default:
3870 break;
3871 }
3872
3873 *handled_ops_p = false;
3874 return NULL_TREE;
3875 }
3876
3877 /* Return true if FNDECL is a function that cannot be inlined into
3878 another one. */
3879
3880 static bool
3881 inline_forbidden_p (tree fndecl)
3882 {
3883 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3884 struct walk_stmt_info wi;
3885 basic_block bb;
3886 bool forbidden_p = false;
3887
3888 /* First check for shared reasons not to copy the code. */
3889 inline_forbidden_reason = copy_forbidden (fun);
3890 if (inline_forbidden_reason != NULL)
3891 return true;
3892
3893 /* Next, walk the statements of the function looking for
3894 constructs we can't handle, or that are non-optimal for inlining. */
3895 hash_set<tree> visited_nodes;
3896 memset (&wi, 0, sizeof (wi));
3897 wi.info = (void *) fndecl;
3898 wi.pset = &visited_nodes;
3899
3900 FOR_EACH_BB_FN (bb, fun)
3901 {
3902 gimple *ret;
3903 gimple_seq seq = bb_seq (bb);
3904 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3905 forbidden_p = (ret != NULL);
3906 if (forbidden_p)
3907 break;
3908 }
3909
3910 return forbidden_p;
3911 }
3912 \f
3913 /* Return false if the function FNDECL cannot be inlined on account of its
3914 attributes, true otherwise. */
3915 static bool
3916 function_attribute_inlinable_p (const_tree fndecl)
3917 {
3918 if (targetm.attribute_table)
3919 {
3920 const_tree a;
3921
3922 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3923 {
3924 const_tree name = get_attribute_name (a);
3925 int i;
3926
3927 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3928 if (is_attribute_p (targetm.attribute_table[i].name, name))
3929 return targetm.function_attribute_inlinable_p (fndecl);
3930 }
3931 }
3932
3933 return true;
3934 }
3935
3936 /* Returns nonzero if FN is a function that does not have any
3937 fundamental inline blocking properties. */
3938
3939 bool
3940 tree_inlinable_function_p (tree fn)
3941 {
3942 bool inlinable = true;
3943 bool do_warning;
3944 tree always_inline;
3945
3946 /* If we've already decided this function shouldn't be inlined,
3947 there's no need to check again. */
3948 if (DECL_UNINLINABLE (fn))
3949 return false;
3950
3951 /* We only warn for functions declared `inline' by the user. */
3952 do_warning = (warn_inline
3953 && DECL_DECLARED_INLINE_P (fn)
3954 && !DECL_NO_INLINE_WARNING_P (fn)
3955 && !DECL_IN_SYSTEM_HEADER (fn));
3956
3957 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3958
3959 if (flag_no_inline
3960 && always_inline == NULL)
3961 {
3962 if (do_warning)
3963 warning (OPT_Winline, "function %q+F can never be inlined because it "
3964 "is suppressed using %<-fno-inline%>", fn);
3965 inlinable = false;
3966 }
3967
3968 else if (!function_attribute_inlinable_p (fn))
3969 {
3970 if (do_warning)
3971 warning (OPT_Winline, "function %q+F can never be inlined because it "
3972 "uses attributes conflicting with inlining", fn);
3973 inlinable = false;
3974 }
3975
3976 else if (inline_forbidden_p (fn))
3977 {
3978 /* See if we should warn about uninlinable functions. Previously,
3979 some of these warnings would be issued while trying to expand
3980 the function inline, but that would cause multiple warnings
3981 about functions that would for example call alloca. But since
3982 this is a property of the function, just one warning is enough.
3983 As a bonus we can now give more details about the reason why a
3984 function is not inlinable. */
3985 if (always_inline)
3986 error (inline_forbidden_reason, fn);
3987 else if (do_warning)
3988 warning (OPT_Winline, inline_forbidden_reason, fn);
3989
3990 inlinable = false;
3991 }
3992
3993 /* Squirrel away the result so that we don't have to check again. */
3994 DECL_UNINLINABLE (fn) = !inlinable;
3995
3996 return inlinable;
3997 }
3998
3999 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
4000 word size, take a possible memcpy call into account, and return the
4001 cost based on whether we are optimizing for size or speed according to SPEED_P. */
4002
4003 int
4004 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4005 {
4006 HOST_WIDE_INT size;
4007
4008 gcc_assert (!VOID_TYPE_P (type));
4009
4010 if (TREE_CODE (type) == VECTOR_TYPE)
4011 {
4012 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4013 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4014 int orig_mode_size
4015 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4016 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4017 return ((orig_mode_size + simd_mode_size - 1)
4018 / simd_mode_size);
4019 }
4020
4021 size = int_size_in_bytes (type);
4022
4023 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4024 /* Cost of a memcpy call, 3 arguments and the call. */
4025 return 4;
4026 else
4027 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4028 }
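/* For example (hypothetical target numbers): with MOVE_MAX_PIECES == 8
   and MOVE_RATIO == 4, an 80-byte structure exceeds the 32-byte limit
   and is costed as a memcpy call (4), while a 24-byte structure is
   costed as (24 + 8 - 1) / 8 == 3 piecewise moves.  */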
4029
4030 /* Returns the cost of operation CODE, according to WEIGHTS. */
4031
4032 static int
4033 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4034 tree op1 ATTRIBUTE_UNUSED, tree op2)
4035 {
4036 switch (code)
4037 {
4038 /* These are "free" conversions, or their presumed cost
4039 is folded into other operations. */
4040 case RANGE_EXPR:
4041 CASE_CONVERT:
4042 case COMPLEX_EXPR:
4043 case PAREN_EXPR:
4044 case VIEW_CONVERT_EXPR:
4045 return 0;
4046
4047 /* Assign cost of 1 to usual operations.
4048 ??? We may consider mapping RTL costs to this. */
4049 case COND_EXPR:
4050 case VEC_COND_EXPR:
4051 case VEC_PERM_EXPR:
4052
4053 case PLUS_EXPR:
4054 case POINTER_PLUS_EXPR:
4055 case POINTER_DIFF_EXPR:
4056 case MINUS_EXPR:
4057 case MULT_EXPR:
4058 case MULT_HIGHPART_EXPR:
4059
4060 case ADDR_SPACE_CONVERT_EXPR:
4061 case FIXED_CONVERT_EXPR:
4062 case FIX_TRUNC_EXPR:
4063
4064 case NEGATE_EXPR:
4065 case FLOAT_EXPR:
4066 case MIN_EXPR:
4067 case MAX_EXPR:
4068 case ABS_EXPR:
4069 case ABSU_EXPR:
4070
4071 case LSHIFT_EXPR:
4072 case RSHIFT_EXPR:
4073 case LROTATE_EXPR:
4074 case RROTATE_EXPR:
4075
4076 case BIT_IOR_EXPR:
4077 case BIT_XOR_EXPR:
4078 case BIT_AND_EXPR:
4079 case BIT_NOT_EXPR:
4080
4081 case TRUTH_ANDIF_EXPR:
4082 case TRUTH_ORIF_EXPR:
4083 case TRUTH_AND_EXPR:
4084 case TRUTH_OR_EXPR:
4085 case TRUTH_XOR_EXPR:
4086 case TRUTH_NOT_EXPR:
4087
4088 case LT_EXPR:
4089 case LE_EXPR:
4090 case GT_EXPR:
4091 case GE_EXPR:
4092 case EQ_EXPR:
4093 case NE_EXPR:
4094 case ORDERED_EXPR:
4095 case UNORDERED_EXPR:
4096
4097 case UNLT_EXPR:
4098 case UNLE_EXPR:
4099 case UNGT_EXPR:
4100 case UNGE_EXPR:
4101 case UNEQ_EXPR:
4102 case LTGT_EXPR:
4103
4104 case CONJ_EXPR:
4105
4106 case PREDECREMENT_EXPR:
4107 case PREINCREMENT_EXPR:
4108 case POSTDECREMENT_EXPR:
4109 case POSTINCREMENT_EXPR:
4110
4111 case REALIGN_LOAD_EXPR:
4112
4113 case WIDEN_SUM_EXPR:
4114 case WIDEN_MULT_EXPR:
4115 case DOT_PROD_EXPR:
4116 case SAD_EXPR:
4117 case WIDEN_MULT_PLUS_EXPR:
4118 case WIDEN_MULT_MINUS_EXPR:
4119 case WIDEN_LSHIFT_EXPR:
4120
4121 case VEC_WIDEN_MULT_HI_EXPR:
4122 case VEC_WIDEN_MULT_LO_EXPR:
4123 case VEC_WIDEN_MULT_EVEN_EXPR:
4124 case VEC_WIDEN_MULT_ODD_EXPR:
4125 case VEC_UNPACK_HI_EXPR:
4126 case VEC_UNPACK_LO_EXPR:
4127 case VEC_UNPACK_FLOAT_HI_EXPR:
4128 case VEC_UNPACK_FLOAT_LO_EXPR:
4129 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4130 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4131 case VEC_PACK_TRUNC_EXPR:
4132 case VEC_PACK_SAT_EXPR:
4133 case VEC_PACK_FIX_TRUNC_EXPR:
4134 case VEC_PACK_FLOAT_EXPR:
4135 case VEC_WIDEN_LSHIFT_HI_EXPR:
4136 case VEC_WIDEN_LSHIFT_LO_EXPR:
4137 case VEC_DUPLICATE_EXPR:
4138 case VEC_SERIES_EXPR:
4139
4140 return 1;
4141
4142 /* A few special cases of expensive operations. This is useful
4143 to avoid inlining functions having too many of these. */
4144 case TRUNC_DIV_EXPR:
4145 case CEIL_DIV_EXPR:
4146 case FLOOR_DIV_EXPR:
4147 case ROUND_DIV_EXPR:
4148 case EXACT_DIV_EXPR:
4149 case TRUNC_MOD_EXPR:
4150 case CEIL_MOD_EXPR:
4151 case FLOOR_MOD_EXPR:
4152 case ROUND_MOD_EXPR:
4153 case RDIV_EXPR:
4154 if (TREE_CODE (op2) != INTEGER_CST)
4155 return weights->div_mod_cost;
4156 return 1;
4157
4158 /* Bit-field insertion needs several shift and mask operations. */
4159 case BIT_INSERT_EXPR:
4160 return 3;
4161
4162 default:
4163 /* We expect a copy assignment with no operator. */
4164 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4165 return 0;
4166 }
4167 }
4168
4169
4170 /* Estimate number of instructions that will be created by expanding
4171 the statements in the statement sequence STMTS.
4172 WEIGHTS contains weights attributed to various constructs. */
4173
4174 int
4175 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4176 {
4177 int cost;
4178 gimple_stmt_iterator gsi;
4179
4180 cost = 0;
4181 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4182 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4183
4184 return cost;
4185 }
4186
4187
4188 /* Estimate number of instructions that will be created by expanding STMT.
4189 WEIGHTS contains weights attributed to various constructs. */
4190
4191 int
4192 estimate_num_insns (gimple *stmt, eni_weights *weights)
4193 {
4194 unsigned cost, i;
4195 enum gimple_code code = gimple_code (stmt);
4196 tree lhs;
4197 tree rhs;
4198
4199 switch (code)
4200 {
4201 case GIMPLE_ASSIGN:
4202 /* Try to estimate the cost of assignments. We have two cases to
4203 deal with:
4204 1) Simple assignments to registers;
4205 2) Stores to things that must live in memory. This includes
4206 "normal" stores to scalars, but also assignments of large
4207 structures, or constructors of big arrays;
4208
4209 Let us look at these two cases, assuming we have "a = b + C":
4210 <GIMPLE_ASSIGN <var_decl "a">
4211 <plus_expr <var_decl "b"> <constant C>>
4212 If "a" is a GIMPLE register, the assignment to it is free on almost
4213 any target, because "a" usually ends up in a real register. Hence
4214 the only cost of this expression comes from the PLUS_EXPR, and we
4215 can ignore the GIMPLE_ASSIGN.
4216 If "a" is not a GIMPLE register, the assignment to "a" will most
4217 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4218 of moving something into "a", which we compute using the function
4219 estimate_move_cost. */
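/* For example (hypothetical GIMPLE): if A lives in memory,
     a = b_1 + 4;
   costs estimate_move_cost of A's type for the store plus 1 for the
   PLUS_EXPR, whereas
     c_2 = b_1 + 4;
   with C_2 a GIMPLE register costs just the PLUS_EXPR.  */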
4220 if (gimple_clobber_p (stmt))
4221 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4222
4223 lhs = gimple_assign_lhs (stmt);
4224 rhs = gimple_assign_rhs1 (stmt);
4225
4226 cost = 0;
4227
4228 /* Account for the cost of moving to / from memory. */
4229 if (gimple_store_p (stmt))
4230 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4231 if (gimple_assign_load_p (stmt))
4232 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4233
4234 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4235 gimple_assign_rhs1 (stmt),
4236 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4237 == GIMPLE_BINARY_RHS
4238 ? gimple_assign_rhs2 (stmt) : NULL);
4239 break;
4240
4241 case GIMPLE_COND:
4242 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4243 gimple_op (stmt, 0),
4244 gimple_op (stmt, 1));
4245 break;
4246
4247 case GIMPLE_SWITCH:
4248 {
4249 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4250 /* Take into account the cost of the switch + guess 2 conditional jumps for
4251 each case label.
4252
4253 TODO: once the switch expansion logic is sufficiently separated, we can
4254 do a better job of estimating the cost of the switch. */
4255 if (weights->time_based)
4256 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4257 else
4258 cost = gimple_switch_num_labels (switch_stmt) * 2;
4259 }
4260 break;
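/* For example (hypothetical): a switch with 16 labels is costed as
   floor_log2 (16) * 2 == 8 when estimating time (a balanced decision
   tree), but as 16 * 2 == 32 when estimating size.  */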
4261
4262 case GIMPLE_CALL:
4263 {
4264 tree decl;
4265
4266 if (gimple_call_internal_p (stmt))
4267 return 0;
4268 else if ((decl = gimple_call_fndecl (stmt))
4269 && fndecl_built_in_p (decl))
4270 {
4271 /* Do not special case builtins where we see the body.
4272 This just confuses the inliner. */
4273 struct cgraph_node *node;
4274 if (!(node = cgraph_node::get (decl))
4275 || node->definition)
4276 ;
4277 /* For builtins that are likely expanded to nothing or
4278 inlined, do not account for operand costs. */
4279 else if (is_simple_builtin (decl))
4280 return 0;
4281 else if (is_inexpensive_builtin (decl))
4282 return weights->target_builtin_call_cost;
4283 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4284 {
4285 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4286 specialize the cheap expansion we do here.
4287 ??? This asks for a more general solution. */
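/* E.g. pow (x, 2.0) is costed like x * x - a single MULT_EXPR -
   rather than as a full call plus argument move costs.  */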
4288 switch (DECL_FUNCTION_CODE (decl))
4289 {
4290 case BUILT_IN_POW:
4291 case BUILT_IN_POWF:
4292 case BUILT_IN_POWL:
4293 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4294 && (real_equal
4295 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4296 &dconst2)))
4297 return estimate_operator_cost
4298 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4299 gimple_call_arg (stmt, 0));
4300 break;
4301
4302 default:
4303 break;
4304 }
4305 }
4306 }
4307
4308 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4309 if (gimple_call_lhs (stmt))
4310 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4311 weights->time_based);
4312 for (i = 0; i < gimple_call_num_args (stmt); i++)
4313 {
4314 tree arg = gimple_call_arg (stmt, i);
4315 cost += estimate_move_cost (TREE_TYPE (arg),
4316 weights->time_based);
4317 }
4318 break;
4319 }
4320
4321 case GIMPLE_RETURN:
4322 return weights->return_cost;
4323
4324 case GIMPLE_GOTO:
4325 case GIMPLE_LABEL:
4326 case GIMPLE_NOP:
4327 case GIMPLE_PHI:
4328 case GIMPLE_PREDICT:
4329 case GIMPLE_DEBUG:
4330 return 0;
4331
4332 case GIMPLE_ASM:
4333 {
4334 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4335 /* 1000 means infinity. This avoids overflows later
4336 with very long asm statements. */
4337 if (count > 1000)
4338 count = 1000;
4339 /* If this asm is asm inline, count anything as minimum size. */
4340 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4341 count = MIN (1, count);
4342 return MAX (1, count);
4343 }
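/* For example (hypothetical): an asm whose template expands to five
   instructions counts as 5, but as 1 when it is marked "asm inline".  */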
4344
4345 case GIMPLE_RESX:
4346 /* This is either going to be an external function call with one
4347 argument, or two register copy statements plus a goto. */
4348 return 2;
4349
4350 case GIMPLE_EH_DISPATCH:
4351 /* ??? This is going to turn into a switch statement. Ideally
4352 we'd have a look at the eh region and estimate the number of
4353 edges involved. */
4354 return 10;
4355
4356 case GIMPLE_BIND:
4357 return estimate_num_insns_seq (
4358 gimple_bind_body (as_a <gbind *> (stmt)),
4359 weights);
4360
4361 case GIMPLE_EH_FILTER:
4362 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4363
4364 case GIMPLE_CATCH:
4365 return estimate_num_insns_seq (gimple_catch_handler (
4366 as_a <gcatch *> (stmt)),
4367 weights);
4368
4369 case GIMPLE_TRY:
4370 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4371 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4372
4373 /* OMP directives are generally very expensive. */
4374
4375 case GIMPLE_OMP_RETURN:
4376 case GIMPLE_OMP_SECTIONS_SWITCH:
4377 case GIMPLE_OMP_ATOMIC_STORE:
4378 case GIMPLE_OMP_CONTINUE:
4379 /* ...except these, which are cheap. */
4380 return 0;
4381
4382 case GIMPLE_OMP_ATOMIC_LOAD:
4383 return weights->omp_cost;
4384
4385 case GIMPLE_OMP_FOR:
4386 return (weights->omp_cost
4387 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4388 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4389
4390 case GIMPLE_OMP_PARALLEL:
4391 case GIMPLE_OMP_TASK:
4392 case GIMPLE_OMP_CRITICAL:
4393 case GIMPLE_OMP_MASTER:
4394 case GIMPLE_OMP_TASKGROUP:
4395 case GIMPLE_OMP_ORDERED:
4396 case GIMPLE_OMP_SCAN:
4397 case GIMPLE_OMP_SECTION:
4398 case GIMPLE_OMP_SECTIONS:
4399 case GIMPLE_OMP_SINGLE:
4400 case GIMPLE_OMP_TARGET:
4401 case GIMPLE_OMP_TEAMS:
4402 return (weights->omp_cost
4403 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4404
4405 case GIMPLE_TRANSACTION:
4406 return (weights->tm_cost
4407 + estimate_num_insns_seq (gimple_transaction_body (
4408 as_a <gtransaction *> (stmt)),
4409 weights));
4410
4411 default:
4412 gcc_unreachable ();
4413 }
4414
4415 return cost;
4416 }
4417
4418 /* Estimate number of instructions that will be created by expanding
4419 function FNDECL. WEIGHTS contains weights attributed to various
4420 constructs. */
4421
4422 int
4423 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4424 {
4425 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4426 gimple_stmt_iterator bsi;
4427 basic_block bb;
4428 int n = 0;
4429
4430 gcc_assert (my_function && my_function->cfg);
4431 FOR_EACH_BB_FN (bb, my_function)
4432 {
4433 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4434 n += estimate_num_insns (gsi_stmt (bsi), weights);
4435 }
4436
4437 return n;
4438 }
4439
4440
4441 /* Initializes weights used by estimate_num_insns. */
4442
4443 void
4444 init_inline_once (void)
4445 {
4446 eni_size_weights.call_cost = 1;
4447 eni_size_weights.indirect_call_cost = 3;
4448 eni_size_weights.target_builtin_call_cost = 1;
4449 eni_size_weights.div_mod_cost = 1;
4450 eni_size_weights.omp_cost = 40;
4451 eni_size_weights.tm_cost = 10;
4452 eni_size_weights.time_based = false;
4453 eni_size_weights.return_cost = 1;
4454
4455 /* Estimating time for a call is difficult, since we have no idea what the
4456 called function does. In the current uses of eni_time_weights,
4457 underestimating the cost does less harm than overestimating it, so
4458 we choose a rather small value here. */
4459 eni_time_weights.call_cost = 10;
4460 eni_time_weights.indirect_call_cost = 15;
4461 eni_time_weights.target_builtin_call_cost = 1;
4462 eni_time_weights.div_mod_cost = 10;
4463 eni_time_weights.omp_cost = 40;
4464 eni_time_weights.tm_cost = 40;
4465 eni_time_weights.time_based = true;
4466 eni_time_weights.return_cost = 2;
4467 }
4468
4469
4470 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4471
4472 static void
4473 prepend_lexical_block (tree current_block, tree new_block)
4474 {
4475 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4476 BLOCK_SUBBLOCKS (current_block) = new_block;
4477 BLOCK_SUPERCONTEXT (new_block) = current_block;
4478 }
4479
4480 /* Add local variables from CALLEE to CALLER. */
4481
4482 static inline void
4483 add_local_variables (struct function *callee, struct function *caller,
4484 copy_body_data *id)
4485 {
4486 tree var;
4487 unsigned ix;
4488
4489 FOR_EACH_LOCAL_DECL (callee, ix, var)
4490 if (!can_be_nonlocal (var, id))
4491 {
4492 tree new_var = remap_decl (var, id);
4493
4494 /* Remap debug-expressions. */
4495 if (VAR_P (new_var)
4496 && DECL_HAS_DEBUG_EXPR_P (var)
4497 && new_var != var)
4498 {
4499 tree tem = DECL_DEBUG_EXPR (var);
4500 bool old_regimplify = id->regimplify;
4501 id->remapping_type_depth++;
4502 walk_tree (&tem, copy_tree_body_r, id, NULL);
4503 id->remapping_type_depth--;
4504 id->regimplify = old_regimplify;
4505 SET_DECL_DEBUG_EXPR (new_var, tem);
4506 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4507 }
4508 add_local_decl (caller, new_var);
4509 }
4510 }
4511
4512 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4513 have brought in or introduced any debug stmts for SRCVAR. */
4514
4515 static inline void
4516 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4517 {
4518 tree *remappedvarp = id->decl_map->get (srcvar);
4519
4520 if (!remappedvarp)
4521 return;
4522
4523 if (!VAR_P (*remappedvarp))
4524 return;
4525
4526 if (*remappedvarp == id->retvar)
4527 return;
4528
4529 tree tvar = target_for_debug_bind (*remappedvarp);
4530 if (!tvar)
4531 return;
4532
4533 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4534 id->call_stmt);
4535 gimple_seq_add_stmt (bindings, stmt);
4536 }
4537
4538 /* For each inlined variable for which we may have debug bind stmts,
4539 add before GSI a final debug stmt resetting it, marking the end of
4540 its life, so that var-tracking knows it doesn't have to compute
4541 further locations for it. */
4542
4543 static inline void
4544 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4545 {
4546 tree var;
4547 unsigned ix;
4548 gimple_seq bindings = NULL;
4549
4550 if (!gimple_in_ssa_p (id->src_cfun))
4551 return;
4552
4553 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4554 return;
4555
4556 for (var = DECL_ARGUMENTS (id->src_fn);
4557 var; var = DECL_CHAIN (var))
4558 reset_debug_binding (id, var, &bindings);
4559
4560 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4561 reset_debug_binding (id, var, &bindings);
4562
4563 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4564 }
4565
4566 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4567
4568 static bool
4569 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4570 {
4571 tree use_retvar;
4572 tree fn;
4573 hash_map<tree, tree> *dst;
4574 hash_map<tree, tree> *st = NULL;
4575 tree return_slot;
4576 tree modify_dest;
4577 struct cgraph_edge *cg_edge;
4578 cgraph_inline_failed_t reason;
4579 basic_block return_block;
4580 edge e;
4581 gimple_stmt_iterator gsi, stmt_gsi;
4582 bool successfully_inlined = false;
4583 bool purge_dead_abnormal_edges;
4584 gcall *call_stmt;
4585 unsigned int prop_mask, src_properties;
4586 struct function *dst_cfun;
4587 tree simduid;
4588 use_operand_p use;
4589 gimple *simtenter_stmt = NULL;
4590 vec<tree> *simtvars_save;
4591
4592 /* The gimplifier uses input_location in too many places, such as
4593 internal_get_tmp_var (). */
4594 location_t saved_location = input_location;
4595 input_location = gimple_location (stmt);
4596
4597 /* From here on, we're only interested in CALL_EXPRs. */
4598 call_stmt = dyn_cast <gcall *> (stmt);
4599 if (!call_stmt)
4600 goto egress;
4601
4602 cg_edge = id->dst_node->get_edge (stmt);
4603 gcc_checking_assert (cg_edge);
4604 /* First, see if we can figure out what function is being called.
4605 If we cannot, then there is no hope of inlining the function. */
4606 if (cg_edge->indirect_unknown_callee)
4607 goto egress;
4608 fn = cg_edge->callee->decl;
4609 gcc_checking_assert (fn);
4610
4611 /* If FN is a declaration of a function in a nested scope that was
4612 globally declared inline, we don't set its DECL_INITIAL.
4613 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4614 C++ front-end uses it for cdtors to refer to their internal
4615 declarations, which are not real functions. Fortunately those
4616 don't have trees to be saved, so we can tell by checking their
4617 gimple_body. */
4618 if (!DECL_INITIAL (fn)
4619 && DECL_ABSTRACT_ORIGIN (fn)
4620 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4621 fn = DECL_ABSTRACT_ORIGIN (fn);
4622
4623 /* Don't try to inline functions that are not well-suited to inlining. */
4624 if (cg_edge->inline_failed)
4625 {
4626 reason = cg_edge->inline_failed;
4627 /* If this call was originally indirect, we do not want to emit any
4628 inlining related warnings or sorry messages because there are no
4629 guarantees regarding those. */
4630 if (cg_edge->indirect_inlining_edge)
4631 goto egress;
4632
4633 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4634 /* For extern inline functions that get redefined we always
4635 silently ignore the always_inline flag. Better behavior would
4636 be to be able to keep both bodies and use the extern inline body
4637 for inlining, but we can't do that because frontends overwrite
4638 the body. */
4639 && !cg_edge->callee->local.redefined_extern_inline
4640 /* During early inline pass, report only when optimization is
4641 not turned on. */
4642 && (symtab->global_info_ready
4643 || !optimize
4644 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4645 /* PR 20090218-1_0.c. Body can be provided by another module. */
4646 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4647 {
4648 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4649 cgraph_inline_failed_string (reason));
4650 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4651 inform (gimple_location (stmt), "called from here");
4652 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4653 inform (DECL_SOURCE_LOCATION (cfun->decl),
4654 "called from this function");
4655 }
4656 else if (warn_inline
4657 && DECL_DECLARED_INLINE_P (fn)
4658 && !DECL_NO_INLINE_WARNING_P (fn)
4659 && !DECL_IN_SYSTEM_HEADER (fn)
4660 && reason != CIF_UNSPECIFIED
4661 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4662 /* Do not warn about not inlined recursive calls. */
4663 && !cg_edge->recursive_p ()
4664 /* Avoid warnings during early inline pass. */
4665 && symtab->global_info_ready)
4666 {
4667 auto_diagnostic_group d;
4668 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4669 fn, _(cgraph_inline_failed_string (reason))))
4670 {
4671 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4672 inform (gimple_location (stmt), "called from here");
4673 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4674 inform (DECL_SOURCE_LOCATION (cfun->decl),
4675 "called from this function");
4676 }
4677 }
4678 goto egress;
4679 }
4680 id->src_node = cg_edge->callee;
4681
4682 /* If the callee is a thunk, all we need to do is adjust the THIS pointer
4683 and redirect to the function being thunked. */
4684 if (id->src_node->thunk.thunk_p)
4685 {
4686 cgraph_edge *edge;
4687 tree virtual_offset = NULL;
4688 profile_count count = cg_edge->count;
4689 tree op;
4690 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4691
4692 cg_edge->remove ();
4693 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4694 gimple_uid (stmt),
4695 profile_count::one (),
4696 profile_count::one (),
4697 true);
4698 edge->count = count;
4699 if (id->src_node->thunk.virtual_offset_p)
4700 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4701 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4702 NULL);
4703 gsi_insert_before (&iter, gimple_build_assign (op,
4704 gimple_call_arg (stmt, 0)),
4705 GSI_NEW_STMT);
4706 gcc_assert (id->src_node->thunk.this_adjusting);
4707 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4708 virtual_offset, id->src_node->thunk.indirect_offset);
4709
4710 gimple_call_set_arg (stmt, 0, op);
4711 gimple_call_set_fndecl (stmt, edge->callee->decl);
4712 update_stmt (stmt);
4713 id->src_node->remove ();
4714 expand_call_inline (bb, stmt, id);
4715 maybe_remove_unused_call_args (cfun, stmt);
4716 return true;
4717 }
4718 fn = cg_edge->callee->decl;
4719 cg_edge->callee->get_untransformed_body ();
4720
4721 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4722 cg_edge->callee->verify ();
4723
4724 /* We will be inlining this callee. */
4725 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4726
4727 /* Update the callers EH personality. */
4728 if (DECL_FUNCTION_PERSONALITY (fn))
4729 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4730 = DECL_FUNCTION_PERSONALITY (fn);
4731
4732 /* Split the block before the GIMPLE_CALL. */
4733 stmt_gsi = gsi_for_stmt (stmt);
4734 gsi_prev (&stmt_gsi);
4735 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4736 bb = e->src;
4737 return_block = e->dest;
4738 remove_edge (e);
4739
4740 /* If the GIMPLE_CALL was the last statement of BB, it may have
4741 been the source of abnormal edges. In this case, schedule
4742 the removal of dead abnormal edges. */
4743 gsi = gsi_start_bb (return_block);
4744 gsi_next (&gsi);
4745 purge_dead_abnormal_edges = gsi_end_p (gsi);
4746
4747 stmt_gsi = gsi_start_bb (return_block);
4748
4749 /* Build a block containing code to initialize the arguments, the
4750 actual inline expansion of the body, and a label for the return
4751 statements within the function to jump to. The type of the
4752 statement expression is the return type of the function call.
4753 ??? If the call does not have an associated block then we will
4754 remap all callee blocks to NULL, effectively dropping most of
4755 its debug information. This should only happen for calls to
4756 artificial decls inserted by the compiler itself. We need to
4757 either link the inlined blocks into the caller block tree or
4758 not refer to them in any way to not break GC for locations. */
4759 if (tree block = gimple_block (stmt))
4760 {
4761 /* We want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION,
4762 to make inlined_function_outer_scope_p return true on this BLOCK. */
4763 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4764 if (loc == UNKNOWN_LOCATION)
4765 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4766 if (loc == UNKNOWN_LOCATION)
4767 loc = BUILTINS_LOCATION;
4768 id->block = make_node (BLOCK);
4769 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4770 BLOCK_SOURCE_LOCATION (id->block) = loc;
4771 prepend_lexical_block (block, id->block);
4772 }
4773
4774 /* Local declarations will be replaced by their equivalents in this map. */
4775 st = id->decl_map;
4776 id->decl_map = new hash_map<tree, tree>;
4777 dst = id->debug_map;
4778 id->debug_map = NULL;
4779 if (flag_stack_reuse != SR_NONE)
4780 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4781
4782 /* Record the function we are about to inline. */
4783 id->src_fn = fn;
4784 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4785 id->reset_location = DECL_IGNORED_P (fn);
4786 id->call_stmt = call_stmt;
4787
4788 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4789 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4790 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4791 simtvars_save = id->dst_simt_vars;
4792 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4793 && (simduid = bb->loop_father->simduid) != NULL_TREE
4794 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4795 && single_imm_use (simduid, &use, &simtenter_stmt)
4796 && is_gimple_call (simtenter_stmt)
4797 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4798 vec_alloc (id->dst_simt_vars, 0);
4799 else
4800 id->dst_simt_vars = NULL;
4801
4802 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4803 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4804
4805 /* If the src function contains an IFN_VA_ARG, then so will the dst
4806 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4807 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4808 src_properties = id->src_cfun->curr_properties & prop_mask;
4809 if (src_properties != prop_mask)
4810 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4811 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4812
4813 gcc_assert (!id->src_cfun->after_inlining);
4814
4815 id->entry_bb = bb;
4816 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4817 {
4818 gimple_stmt_iterator si = gsi_last_bb (bb);
4819 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4820 NOT_TAKEN),
4821 GSI_NEW_STMT);
4822 }
4823 initialize_inlined_parameters (id, stmt, fn, bb);
4824 if (debug_nonbind_markers_p && debug_inline_points && id->block
4825 && inlined_function_outer_scope_p (id->block))
4826 {
4827 gimple_stmt_iterator si = gsi_last_bb (bb);
4828 gsi_insert_after (&si, gimple_build_debug_inline_entry
4829 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4830 GSI_NEW_STMT);
4831 }
4832
4833 if (DECL_INITIAL (fn))
4834 {
4835 if (gimple_block (stmt))
4836 {
4837 tree *var;
4838
4839 prepend_lexical_block (id->block,
4840 remap_blocks (DECL_INITIAL (fn), id));
4841 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4842 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4843 == NULL_TREE));
4844 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4845 otherwise in DWARF the DW_TAG_formal_parameter DIEs will not be children of
4846 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4847 under it. The parameters can then be evaluated in the debugger,
4848 but don't show up in backtraces. */
4849 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4850 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4851 {
4852 tree v = *var;
4853 *var = TREE_CHAIN (v);
4854 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4855 BLOCK_VARS (id->block) = v;
4856 }
4857 else
4858 var = &TREE_CHAIN (*var);
4859 }
4860 else
4861 remap_blocks_to_null (DECL_INITIAL (fn), id);
4862 }
4863
4864 /* Return statements in the function body will be replaced by jumps
4865 to the RET_LABEL. */
4866 gcc_assert (DECL_INITIAL (fn));
4867 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4868
4869 /* Find the LHS to which the result of this call is assigned. */
4870 return_slot = NULL;
4871 if (gimple_call_lhs (stmt))
4872 {
4873 modify_dest = gimple_call_lhs (stmt);
4874
4875 /* The function which we are inlining might not return a value,
4876 in which case we should issue a warning that the function
4877 does not return a value. In that case the optimizers will
4878 see that the variable to which the value is assigned was not
4879 initialized. We do not want to issue a warning about that
4880 uninitialized variable. */
4881 if (DECL_P (modify_dest))
4882 TREE_NO_WARNING (modify_dest) = 1;
4883
4884 if (gimple_call_return_slot_opt_p (call_stmt))
4885 {
4886 return_slot = modify_dest;
4887 modify_dest = NULL;
4888 }
4889 }
4890 else
4891 modify_dest = NULL;
4892
4893 /* If we are inlining a call to the C++ operator new, we don't want
4894 to use type based alias analysis on the return value. Otherwise
4895 we may get confused if the compiler sees that the inlined new
4896 function returns a pointer which was just deleted. See bug
4897 33407. */
4898 if (DECL_IS_OPERATOR_NEW (fn))
4899 {
4900 return_slot = NULL;
4901 modify_dest = NULL;
4902 }
4903
4904 /* Declare the return variable for the function. */
4905 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4906
4907 /* Add local vars in this inlined callee to caller. */
4908 add_local_variables (id->src_cfun, cfun, id);
4909
4910 if (dump_enabled_p ())
4911 {
4912 char buf[128];
4913 snprintf (buf, sizeof(buf), "%4.2f",
4914 cg_edge->sreal_frequency ().to_double ());
4915 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4916 call_stmt,
4917 "Inlining %C to %C with frequency %s\n",
4918 id->src_node, id->dst_node, buf);
4919 if (dump_file && (dump_flags & TDF_DETAILS))
4920 {
4921 id->src_node->dump (dump_file);
4922 id->dst_node->dump (dump_file);
4923 }
4924 }
4925
4926 /* This is it. Duplicate the callee body. Assume callee is
4927 pre-gimplified. Note that we must not alter the caller
4928 function in any way before this point, as this CALL_EXPR may be
4929 a self-referential call; if we're calling ourselves, we need to
4930 duplicate our body before altering anything. */
4931 copy_body (id, bb, return_block, NULL);
4932
4933 reset_debug_bindings (id, stmt_gsi);
4934
4935 if (flag_stack_reuse != SR_NONE)
4936 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4937 if (!TREE_THIS_VOLATILE (p))
4938 {
4939 tree *varp = id->decl_map->get (p);
4940 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4941 {
4942 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4943 gimple *clobber_stmt;
4944 TREE_THIS_VOLATILE (clobber) = 1;
4945 clobber_stmt = gimple_build_assign (*varp, clobber);
4946 gimple_set_location (clobber_stmt, gimple_location (stmt));
4947 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4948 }
4949 }
4950
4951 /* Reset the escaped solution. */
4952 if (cfun->gimple_df)
4953 pt_solution_reset (&cfun->gimple_df->escaped);
4954
4955 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4956 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4957 {
4958 size_t nargs = gimple_call_num_args (simtenter_stmt);
4959 vec<tree> *vars = id->dst_simt_vars;
4960 auto_vec<tree> newargs (nargs + vars->length ());
4961 for (size_t i = 0; i < nargs; i++)
4962 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4963 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4964 {
4965 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4966 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4967 }
4968 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4969 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4970 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4971 gsi_replace (&gsi, g, false);
4972 }
4973 vec_free (id->dst_simt_vars);
4974 id->dst_simt_vars = simtvars_save;
4975
4976 /* Clean up. */
4977 if (id->debug_map)
4978 {
4979 delete id->debug_map;
4980 id->debug_map = dst;
4981 }
4982 delete id->decl_map;
4983 id->decl_map = st;
4984
4985 /* Unlink the call's virtual operands before replacing it. */
4986 unlink_stmt_vdef (stmt);
4987 if (gimple_vdef (stmt)
4988 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4989 release_ssa_name (gimple_vdef (stmt));
4990
4991 /* If the inlined function returns a result that we care about,
4992 substitute the GIMPLE_CALL with an assignment of the return
4993 variable to the LHS of the call. That is, if STMT was
4994 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4995 if (use_retvar && gimple_call_lhs (stmt))
4996 {
4997 gimple *old_stmt = stmt;
4998 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4999 gimple_set_location (stmt, gimple_location (old_stmt));
5000 gsi_replace (&stmt_gsi, stmt, false);
5001 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5002 /* Append a clobber for id->retvar if easily possible. */
5003 if (flag_stack_reuse != SR_NONE
5004 && id->retvar
5005 && VAR_P (id->retvar)
5006 && id->retvar != return_slot
5007 && id->retvar != modify_dest
5008 && !TREE_THIS_VOLATILE (id->retvar)
5009 && !is_gimple_reg (id->retvar)
5010 && !stmt_ends_bb_p (stmt))
5011 {
5012 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5013 gimple *clobber_stmt;
5014 TREE_THIS_VOLATILE (clobber) = 1;
5015 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5016 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5017 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5018 }
5019 }
5020 else
5021 {
5022 /* Handle the case of inlining a function with no return
5023 statement, which causes the return value to become undefined. */
5024 if (gimple_call_lhs (stmt)
5025 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5026 {
5027 tree name = gimple_call_lhs (stmt);
5028 tree var = SSA_NAME_VAR (name);
5029 tree def = var ? ssa_default_def (cfun, var) : NULL;
5030
5031 if (def)
5032 {
5033 /* If the variable is used undefined, make this name
5034 undefined via a move. */
5035 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5036 gsi_replace (&stmt_gsi, stmt, true);
5037 }
5038 else
5039 {
5040 if (!var)
5041 {
5042 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5043 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5044 }
5045 /* Otherwise make this variable undefined. */
5046 gsi_remove (&stmt_gsi, true);
5047 set_ssa_default_def (cfun, var, name);
5048 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5049 }
5050 }
5051 /* Replace with a clobber for id->retvar. */
5052 else if (flag_stack_reuse != SR_NONE
5053 && id->retvar
5054 && VAR_P (id->retvar)
5055 && id->retvar != return_slot
5056 && id->retvar != modify_dest
5057 && !TREE_THIS_VOLATILE (id->retvar)
5058 && !is_gimple_reg (id->retvar))
5059 {
5060 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5061 gimple *clobber_stmt;
5062 TREE_THIS_VOLATILE (clobber) = 1;
5063 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5064 gimple_set_location (clobber_stmt, gimple_location (stmt));
5065 gsi_replace (&stmt_gsi, clobber_stmt, false);
5066 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5067 }
5068 else
5069 gsi_remove (&stmt_gsi, true);
5070 }
5071
5072 if (purge_dead_abnormal_edges)
5073 {
5074 gimple_purge_dead_eh_edges (return_block);
5075 gimple_purge_dead_abnormal_call_edges (return_block);
5076 }
5077
5078 /* If the value of the new expression is ignored, that's OK. We
5079 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5080 the equivalent inlined version either. */
5081 if (is_gimple_assign (stmt))
5082 {
5083 gcc_assert (gimple_assign_single_p (stmt)
5084 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5085 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5086 }
5087
5088 id->add_clobbers_to_eh_landing_pads = 0;
5089
5090 /* Output the inlining info for this abstract function, since it has been
5091 inlined. If we don't do this now, we can lose the information about the
5092 variables in the function when the blocks get blown away as soon as we
5093 remove the cgraph node. */
5094 if (gimple_block (stmt))
5095 (*debug_hooks->outlining_inline_function) (fn);
5096
5097 /* Update callgraph if needed. */
5098 cg_edge->callee->remove ();
5099
5100 id->block = NULL_TREE;
5101 id->retvar = NULL_TREE;
5102 successfully_inlined = true;
5103
5104 egress:
5105 input_location = saved_location;
5106 return successfully_inlined;
5107 }
5108
5109 /* Expand the call statements in basic block BB.
5110 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5111 in a MODIFY_EXPR. */
5112
5113 static bool
5114 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5115 {
5116 gimple_stmt_iterator gsi;
5117 bool inlined = false;
5118
5119 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5120 {
5121 gimple *stmt = gsi_stmt (gsi);
5122 gsi_prev (&gsi);
5123
5124 if (is_gimple_call (stmt)
5125 && !gimple_call_internal_p (stmt))
5126 inlined |= expand_call_inline (bb, stmt, id);
5127 }
5128
5129 return inlined;
5130 }
5131
5132
5133 /* Walk all basic blocks created after FIRST and try to fold every statement
5134 in the STATEMENTS pointer set. */
5135
5136 static void
5137 fold_marked_statements (int first, hash_set<gimple *> *statements)
5138 {
5139 for (; first < last_basic_block_for_fn (cfun); first++)
5140 if (BASIC_BLOCK_FOR_FN (cfun, first))
5141 {
5142 gimple_stmt_iterator gsi;
5143
5144 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5145 !gsi_end_p (gsi);
5146 gsi_next (&gsi))
5147 if (statements->contains (gsi_stmt (gsi)))
5148 {
5149 gimple *old_stmt = gsi_stmt (gsi);
5150 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5151
5152 if (old_decl && fndecl_built_in_p (old_decl))
5153 {
5154 /* Folding builtins can create multiple statements;
5155 we need to look at all of them. */
5156 gimple_stmt_iterator i2 = gsi;
5157 gsi_prev (&i2);
5158 if (fold_stmt (&gsi))
5159 {
5160 gimple *new_stmt;
5161 /* If a builtin at the end of a bb folded into nothing,
5162 the following loop won't work. */
5163 if (gsi_end_p (gsi))
5164 {
5165 cgraph_update_edges_for_call_stmt (old_stmt,
5166 old_decl, NULL);
5167 break;
5168 }
5169 if (gsi_end_p (i2))
5170 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5171 else
5172 gsi_next (&i2);
5173 while (1)
5174 {
5175 new_stmt = gsi_stmt (i2);
5176 update_stmt (new_stmt);
5177 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5178 new_stmt);
5179
5180 if (new_stmt == gsi_stmt (gsi))
5181 {
5182 /* It is okay to check only the very last
5183 of these statements. If it is a throwing
5184 statement nothing will change. If it isn't,
5185 this can remove EH edges. The only case in
5186 which that would be wrong is when some
5187 intermediate stmts throw but the last one
5188 does not; that would mean we'd have to split
5189 the block, which we can't do here and we'd
5190 lose anyway. And as builtins probably never
5191 throw, this is all moot anyway. */
5192 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5193 new_stmt))
5194 gimple_purge_dead_eh_edges (
5195 BASIC_BLOCK_FOR_FN (cfun, first));
5196 break;
5197 }
5198 gsi_next (&i2);
5199 }
5200 }
5201 }
5202 else if (fold_stmt (&gsi))
5203 {
5204 /* Re-read the statement from GSI as fold_stmt() may
5205 have changed it. */
5206 gimple *new_stmt = gsi_stmt (gsi);
5207 update_stmt (new_stmt);
5208
5209 if (is_gimple_call (old_stmt)
5210 || is_gimple_call (new_stmt))
5211 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5212 new_stmt);
5213
5214 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5215 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5216 first));
5217 }
5218 }
5219 }
5220 }
5221
5222 /* Expand calls to inline functions in the body of FN. */
5223
5224 unsigned int
5225 optimize_inline_calls (tree fn)
5226 {
5227 copy_body_data id;
5228 basic_block bb;
5229 int last = n_basic_blocks_for_fn (cfun);
5230 bool inlined_p = false;
5231
5232 /* Clear out ID. */
5233 memset (&id, 0, sizeof (id));
5234
5235 id.src_node = id.dst_node = cgraph_node::get (fn);
5236 gcc_assert (id.dst_node->definition);
5237 id.dst_fn = fn;
5238 /* Or any functions that aren't finished yet. */
5239 if (current_function_decl)
5240 id.dst_fn = current_function_decl;
5241
5242 id.copy_decl = copy_decl_maybe_to_var;
5243 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5244 id.transform_new_cfg = false;
5245 id.transform_return_to_modify = true;
5246 id.transform_parameter = true;
5247 id.transform_lang_insert_block = NULL;
5248 id.statements_to_fold = new hash_set<gimple *>;
5249
5250 push_gimplify_context ();
5251
5252 /* We make no attempts to keep dominance info up-to-date. */
5253 free_dominance_info (CDI_DOMINATORS);
5254 free_dominance_info (CDI_POST_DOMINATORS);
5255
5256 /* Register specific gimple functions. */
5257 gimple_register_cfg_hooks ();
5258
5259 /* Reach the trees by walking over the CFG, and note the
5260 enclosing basic-blocks in the call edges. */
5261 /* We walk the blocks going forward, because inlined function bodies
5262 will split id->current_basic_block, and the new blocks will
5263 follow it; we'll trudge through them, processing their CALL_EXPRs
5264 along the way. */
5265 FOR_EACH_BB_FN (bb, cfun)
5266 inlined_p |= gimple_expand_calls_inline (bb, &id);
5267
5268 pop_gimplify_context (NULL);
5269
5270 if (flag_checking)
5271 {
5272 struct cgraph_edge *e;
5273
5274 id.dst_node->verify ();
5275
5276 /* Double check that we inlined everything we are supposed to inline. */
5277 for (e = id.dst_node->callees; e; e = e->next_callee)
5278 gcc_assert (e->inline_failed);
5279 }
5280
5281 /* Fold queued statements. */
5282 update_max_bb_count ();
5283 fold_marked_statements (last, id.statements_to_fold);
5284 delete id.statements_to_fold;
5285
5286 gcc_assert (!id.debug_stmts.exists ());
5287
5288 /* If we didn't inline into the function there is nothing to do. */
5289 if (!inlined_p)
5290 return 0;
5291
5292 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5293 number_blocks (fn);
5294
5295 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5296
5297 if (flag_checking)
5298 id.dst_node->verify ();
5299
5300   /* It would be nice to check SSA/CFG/statement consistency here, but it is
5301      not possible yet - the IPA passes might make various functions not throw,
5302      and they don't bother to proactively update local EH info.  This is done
5303      later in the fixup_cfg pass, which also executes the verification.  */
5304 return (TODO_update_ssa
5305 | TODO_cleanup_cfg
5306 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5307 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5308 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5309 ? TODO_rebuild_frequencies : 0));
5310 }
5311
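/* Editorial sketch (not part of the original file): how a caller is expected
   to drive optimize_inline_calls.  The wrapper below and its name are
   hypothetical; the real driver lives in the inliner's transform stage.  The
   point is that the function operates on CFUN, requires a cgraph node with a
   definition, and hands TODO_* flags back to the pass manager.  */

static unsigned int
example_inline_into_current_function (void)
{
  /* Precondition: cfun/current_function_decl are set to the function whose
     body should receive the inlined copies, and it has a GIMPLE CFG.  */
  unsigned int todos = optimize_inline_calls (current_function_decl);

  /* The returned flags (TODO_update_ssa, TODO_cleanup_cfg, ...) are meant to
     be returned from a pass's execute hook, not acted on here.  */
  return todos;
}
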
5312 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5313
5314 tree
5315 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5316 {
5317 enum tree_code code = TREE_CODE (*tp);
5318 enum tree_code_class cl = TREE_CODE_CLASS (code);
5319
5320 /* We make copies of most nodes. */
5321 if (IS_EXPR_CODE_CLASS (cl)
5322 || code == TREE_LIST
5323 || code == TREE_VEC
5324 || code == TYPE_DECL
5325 || code == OMP_CLAUSE)
5326 {
5327 /* Because the chain gets clobbered when we make a copy, we save it
5328 here. */
5329 tree chain = NULL_TREE, new_tree;
5330
5331 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5332 chain = TREE_CHAIN (*tp);
5333
5334 /* Copy the node. */
5335 new_tree = copy_node (*tp);
5336
5337 *tp = new_tree;
5338
5339 /* Now, restore the chain, if appropriate. That will cause
5340 walk_tree to walk into the chain as well. */
5341 if (code == PARM_DECL
5342 || code == TREE_LIST
5343 || code == OMP_CLAUSE)
5344 TREE_CHAIN (*tp) = chain;
5345
5346 /* For now, we don't update BLOCKs when we make copies. So, we
5347 have to nullify all BIND_EXPRs. */
5348 if (TREE_CODE (*tp) == BIND_EXPR)
5349 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5350 }
5351 else if (code == CONSTRUCTOR)
5352 {
5353 /* CONSTRUCTOR nodes need special handling because
5354 we need to duplicate the vector of elements. */
5355 tree new_tree;
5356
5357 new_tree = copy_node (*tp);
5358 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5359 *tp = new_tree;
5360 }
5361 else if (code == STATEMENT_LIST)
5362 /* We used to just abort on STATEMENT_LIST, but we can run into them
5363 with statement-expressions (c++/40975). */
5364 copy_statement_list (tp);
5365 else if (TREE_CODE_CLASS (code) == tcc_type)
5366 *walk_subtrees = 0;
5367 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5368 *walk_subtrees = 0;
5369 else if (TREE_CODE_CLASS (code) == tcc_constant)
5370 *walk_subtrees = 0;
5371 return NULL_TREE;
5372 }
5373
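/* Editorial sketch (assumption-labelled, not from the original file):
   copy_tree_r is designed to be used as a walk_tree callback.  The helper
   below is hypothetical; it deep-copies a GENERIC expression by letting
   walk_tree visit every node and copy_tree_r replace each visited node with
   a fresh copy, while leaves such as declarations, types and constants are
   mostly shared rather than copied.  */

static tree
example_deep_copy_generic_expr (tree expr)
{
  tree copy = expr;

  /* walk_tree rewrites *TP in place, so work on a separate pointer; the
     original EXPR is only read, never modified.  */
  walk_tree (&copy, copy_tree_r, NULL, NULL);
  return copy;
}
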
5374 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5375 information indicating to what new SAVE_EXPR this one should be mapped,
5376 use that one. Otherwise, create a new node and enter it in ST. FN is
5377 the function into which the copy will be placed. */
5378
5379 static void
5380 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5381 {
5382 tree *n;
5383 tree t;
5384
5385 /* See if we already encountered this SAVE_EXPR. */
5386 n = st->get (*tp);
5387
5388 /* If we didn't already remap this SAVE_EXPR, do so now. */
5389 if (!n)
5390 {
5391 t = copy_node (*tp);
5392
5393 /* Remember this SAVE_EXPR. */
5394 st->put (*tp, t);
5395 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5396 st->put (t, t);
5397 }
5398 else
5399 {
5400 /* We've already walked into this SAVE_EXPR; don't do it again. */
5401 *walk_subtrees = 0;
5402 t = *n;
5403 }
5404
5405 /* Replace this SAVE_EXPR with the copy. */
5406 *tp = t;
5407 }
5408
5409 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5410    label, copies the declaration and enters it in the decl map of the
5411    copy_body_data passed through WI->info.  */
5412
5413 static tree
5414 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5415 bool *handled_ops_p ATTRIBUTE_UNUSED,
5416 struct walk_stmt_info *wi)
5417 {
5418 copy_body_data *id = (copy_body_data *) wi->info;
5419 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5420
5421 if (stmt)
5422 {
5423 tree decl = gimple_label_label (stmt);
5424
5425 /* Copy the decl and remember the copy. */
5426 insert_decl_map (id, decl, id->copy_decl (decl, id));
5427 }
5428
5429 return NULL_TREE;
5430 }
5431
5432 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5433 struct walk_stmt_info *wi);
5434
5435 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5436    Using the decl map of the copy_body_data passed through WI->info,
5437    remaps all local declarations to appropriate replacements in gimple
5438    operands.  */
5439
5440 static tree
5441 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5442 {
5443 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5444 copy_body_data *id = (copy_body_data *) wi->info;
5445 hash_map<tree, tree> *st = id->decl_map;
5446 tree *n;
5447 tree expr = *tp;
5448
5449 /* For recursive invocations this is no longer the LHS itself. */
5450 bool is_lhs = wi->is_lhs;
5451 wi->is_lhs = false;
5452
5453 if (TREE_CODE (expr) == SSA_NAME)
5454 {
5455 *tp = remap_ssa_name (*tp, id);
5456 *walk_subtrees = 0;
5457 if (is_lhs)
5458 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5459 }
5460 /* Only a local declaration (variable or label). */
5461 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5462 || TREE_CODE (expr) == LABEL_DECL)
5463 {
5464 /* Lookup the declaration. */
5465 n = st->get (expr);
5466
5467 /* If it's there, remap it. */
5468 if (n)
5469 *tp = *n;
5470 *walk_subtrees = 0;
5471 }
5472 else if (TREE_CODE (expr) == STATEMENT_LIST
5473 || TREE_CODE (expr) == BIND_EXPR
5474 || TREE_CODE (expr) == SAVE_EXPR)
5475 gcc_unreachable ();
5476 else if (TREE_CODE (expr) == TARGET_EXPR)
5477 {
5478 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5479 It's OK for this to happen if it was part of a subtree that
5480 isn't immediately expanded, such as operand 2 of another
5481 TARGET_EXPR. */
5482 if (!TREE_OPERAND (expr, 1))
5483 {
5484 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5485 TREE_OPERAND (expr, 3) = NULL_TREE;
5486 }
5487 }
5488 else if (TREE_CODE (expr) == OMP_CLAUSE)
5489 {
5490 /* Before the omplower pass completes, some OMP clauses can contain
5491 sequences that are neither copied by gimple_seq_copy nor walked by
5492 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5493          in those situations, we have to copy and process them explicitly.  */
5494
5495 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5496 {
5497 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5498 seq = duplicate_remap_omp_clause_seq (seq, wi);
5499 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5500 }
5501 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5502 {
5503 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5504 seq = duplicate_remap_omp_clause_seq (seq, wi);
5505 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5506 }
5507 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5508 {
5509 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5510 seq = duplicate_remap_omp_clause_seq (seq, wi);
5511 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5512 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5513 seq = duplicate_remap_omp_clause_seq (seq, wi);
5514 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5515 }
5516 }
5517
5518 /* Keep iterating. */
5519 return NULL_TREE;
5520 }
5521
5522
5523 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5524    Using the decl map of the copy_body_data passed through WI->info,
5525    remaps all local declarations to appropriate replacements in gimple
5526    statements.  */
5527
5528 static tree
5529 replace_locals_stmt (gimple_stmt_iterator *gsip,
5530 bool *handled_ops_p ATTRIBUTE_UNUSED,
5531 struct walk_stmt_info *wi)
5532 {
5533 copy_body_data *id = (copy_body_data *) wi->info;
5534 gimple *gs = gsi_stmt (*gsip);
5535
5536 if (gbind *stmt = dyn_cast <gbind *> (gs))
5537 {
5538 tree block = gimple_bind_block (stmt);
5539
5540 if (block)
5541 {
5542 remap_block (&block, id);
5543 gimple_bind_set_block (stmt, block);
5544 }
5545
5546 /* This will remap a lot of the same decls again, but this should be
5547 harmless. */
5548 if (gimple_bind_vars (stmt))
5549 {
5550 tree old_var, decls = gimple_bind_vars (stmt);
5551
5552 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5553 if (!can_be_nonlocal (old_var, id)
5554 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5555 remap_decl (old_var, id);
5556
5557 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5558 id->prevent_decl_creation_for_types = true;
5559 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5560 id->prevent_decl_creation_for_types = false;
5561 }
5562 }
5563
5564 /* Keep iterating. */
5565 return NULL_TREE;
5566 }
5567
5568 /* Create a copy of SEQ and remap all decls in it. */
5569
5570 static gimple_seq
5571 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5572 {
5573 if (!seq)
5574 return NULL;
5575
5576   /* If there are any labels in OMP sequences, they can only be referred to
5577      within the sequence itself, so we can do both walks here.  */
5578 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5579 gimple_seq copy = gimple_seq_copy (seq);
5580 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5581 return copy;
5582 }
5583
5584 /* Copies everything in SEQ and replaces variables and labels local to
5585 current_function_decl. */
5586
5587 gimple_seq
5588 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5589 {
5590 copy_body_data id;
5591 struct walk_stmt_info wi;
5592 gimple_seq copy;
5593
5594 /* There's nothing to do for NULL_TREE. */
5595 if (seq == NULL)
5596 return seq;
5597
5598 /* Set up ID. */
5599 memset (&id, 0, sizeof (id));
5600 id.src_fn = current_function_decl;
5601 id.dst_fn = current_function_decl;
5602 id.src_cfun = cfun;
5603 id.decl_map = new hash_map<tree, tree>;
5604 id.debug_map = NULL;
5605
5606 id.copy_decl = copy_decl_no_change;
5607 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5608 id.transform_new_cfg = false;
5609 id.transform_return_to_modify = false;
5610 id.transform_parameter = false;
5611 id.transform_lang_insert_block = NULL;
5612
5613 /* Walk the tree once to find local labels. */
5614 memset (&wi, 0, sizeof (wi));
5615 hash_set<tree> visited;
5616 wi.info = &id;
5617 wi.pset = &visited;
5618 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5619
5620 copy = gimple_seq_copy (seq);
5621
5622 /* Walk the copy, remapping decls. */
5623 memset (&wi, 0, sizeof (wi));
5624 wi.info = &id;
5625 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5626
5627 /* Clean up. */
5628 delete id.decl_map;
5629 if (id.debug_map)
5630 delete id.debug_map;
5631 if (id.dependence_map)
5632 {
5633 delete id.dependence_map;
5634 id.dependence_map = NULL;
5635 }
5636
5637 return copy;
5638 }
5639
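/* Editorial sketch (hypothetical helper, not from the original file): the
   typical reason to call copy_gimple_seq_and_replace_locals is that the same
   statement sequence must be emitted in two places; linking one gimple_seq
   object into two statement chains would corrupt it, so the second emission
   uses fresh statements with remapped locals.  */

static void
example_emit_seq_twice (gimple_seq body, gimple_seq *out1, gimple_seq *out2)
{
  /* Take the remapped copy before BODY gets linked anywhere.  */
  gimple_seq copy = copy_gimple_seq_and_replace_locals (body);

  /* Emit the original once and the independent copy once.  */
  gimple_seq_add_seq (out1, body);
  gimple_seq_add_seq (out2, copy);
}
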
5640
5641 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5642
5643 static tree
5644 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5645 {
5646 if (*tp == data)
5647 return (tree) data;
5648 else
5649 return NULL;
5650 }
5651
5652 DEBUG_FUNCTION bool
5653 debug_find_tree (tree top, tree search)
5654 {
5655 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5656 }
5657
5658
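/* Editorial note: debug_find_tree is meant for interactive debugging of the
   compiler itself.  An illustrative gdb session attached to cc1 might use it
   like this, with TOP and SEARCH standing for trees of interest:

     (gdb) call debug_find_tree (top, search)
     $1 = true

   A true result means SEARCH is reachable somewhere below TOP; the exact
   expressions above are only an example.  */
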
5659 /* Declare the variables created by the inliner.  Add all the variables in
5660    VARS to BLOCK.  */
5661
5662 static void
5663 declare_inline_vars (tree block, tree vars)
5664 {
5665 tree t;
5666 for (t = vars; t; t = DECL_CHAIN (t))
5667 {
5668 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5669 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5670 add_local_decl (cfun, t);
5671 }
5672
5673 if (block)
5674 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5675 }
5676
5677 /* Finish copying DECL into COPY.  The DECL originally was in ID->src_fn,
5678    but the copy will live in ID->dst_fn.  This is the shared tail of the
5679    copy_decl_* helpers below, including PARM_DECL to VAR_DECL translation.  */
5680
5681 tree
5682 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5683 {
5684   /* Don't generate debug information for the copy if we wouldn't have
5685      generated it for the original either.  */
5686 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5687 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5688
5689 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5690 declaration inspired this copy. */
5691 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5692
5693 /* The new variable/label has no RTL, yet. */
5694 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5695 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5696 SET_DECL_RTL (copy, 0);
5697 /* For vector typed decls make sure to update DECL_MODE according
5698 to the new function context. */
5699 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5700 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5701
5702 /* These args would always appear unused, if not for this. */
5703 TREE_USED (copy) = 1;
5704
5705 /* Set the context for the new declaration. */
5706 if (!DECL_CONTEXT (decl))
5707 /* Globals stay global. */
5708 ;
5709 else if (DECL_CONTEXT (decl) != id->src_fn)
5710 /* Things that weren't in the scope of the function we're inlining
5711 from aren't in the scope we're inlining to, either. */
5712 ;
5713 else if (TREE_STATIC (decl))
5714 /* Function-scoped static variables should stay in the original
5715 function. */
5716 ;
5717 else
5718 {
5719 /* Ordinary automatic local variables are now in the scope of the
5720 new function. */
5721 DECL_CONTEXT (copy) = id->dst_fn;
5722 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5723 {
5724 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5725 DECL_ATTRIBUTES (copy)
5726 = tree_cons (get_identifier ("omp simt private"), NULL,
5727 DECL_ATTRIBUTES (copy));
5728 id->dst_simt_vars->safe_push (copy);
5729 }
5730 }
5731
5732 return copy;
5733 }
5734
5735 static tree
5736 copy_decl_to_var (tree decl, copy_body_data *id)
5737 {
5738 tree copy, type;
5739
5740 gcc_assert (TREE_CODE (decl) == PARM_DECL
5741 || TREE_CODE (decl) == RESULT_DECL);
5742
5743 type = TREE_TYPE (decl);
5744
5745 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5746 VAR_DECL, DECL_NAME (decl), type);
5747 if (DECL_PT_UID_SET_P (decl))
5748 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5749 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5750 TREE_READONLY (copy) = TREE_READONLY (decl);
5751 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5752 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5753
5754 return copy_decl_for_dup_finish (id, decl, copy);
5755 }
5756
5757 /* Like copy_decl_to_var, but create a return slot object instead of a
5758 pointer variable for return by invisible reference. */
5759
5760 static tree
5761 copy_result_decl_to_var (tree decl, copy_body_data *id)
5762 {
5763 tree copy, type;
5764
5765 gcc_assert (TREE_CODE (decl) == PARM_DECL
5766 || TREE_CODE (decl) == RESULT_DECL);
5767
5768 type = TREE_TYPE (decl);
5769 if (DECL_BY_REFERENCE (decl))
5770 type = TREE_TYPE (type);
5771
5772 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5773 VAR_DECL, DECL_NAME (decl), type);
5774 if (DECL_PT_UID_SET_P (decl))
5775 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5776 TREE_READONLY (copy) = TREE_READONLY (decl);
5777 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5778 if (!DECL_BY_REFERENCE (decl))
5779 {
5780 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5781 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5782 }
5783
5784 return copy_decl_for_dup_finish (id, decl, copy);
5785 }
5786
5787 tree
5788 copy_decl_no_change (tree decl, copy_body_data *id)
5789 {
5790 tree copy;
5791
5792 copy = copy_node (decl);
5793
5794 /* The COPY is not abstract; it will be generated in DST_FN. */
5795 DECL_ABSTRACT_P (copy) = false;
5796 lang_hooks.dup_lang_specific_decl (copy);
5797
5798 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5799 been taken; it's for internal bookkeeping in expand_goto_internal. */
5800 if (TREE_CODE (copy) == LABEL_DECL)
5801 {
5802 TREE_ADDRESSABLE (copy) = 0;
5803 LABEL_DECL_UID (copy) = -1;
5804 }
5805
5806 return copy_decl_for_dup_finish (id, decl, copy);
5807 }
5808
5809 static tree
5810 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5811 {
5812 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5813 return copy_decl_to_var (decl, id);
5814 else
5815 return copy_decl_no_change (decl, id);
5816 }
5817
5818 /* Return a copy of the function's argument tree. */
5819 static tree
5820 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5821 bitmap args_to_skip, tree *vars)
5822 {
5823 tree arg, *parg;
5824 tree new_parm = NULL;
5825 int i = 0;
5826
5827 parg = &new_parm;
5828
5829 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5830 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5831 {
5832 tree new_tree = remap_decl (arg, id);
5833 if (TREE_CODE (new_tree) != PARM_DECL)
5834 new_tree = id->copy_decl (arg, id);
5835 lang_hooks.dup_lang_specific_decl (new_tree);
5836 *parg = new_tree;
5837 parg = &DECL_CHAIN (new_tree);
5838 }
5839 else if (!id->decl_map->get (arg))
5840 {
5841       /* Make an equivalent VAR_DECL.  If the argument was used
5842          as a temporary variable later in the function, the uses will be
5843          replaced by this local variable.  */
5844 tree var = copy_decl_to_var (arg, id);
5845 insert_decl_map (id, arg, var);
5846 /* Declare this new variable. */
5847 DECL_CHAIN (var) = *vars;
5848 *vars = var;
5849 }
5850 return new_parm;
5851 }
5852
5853 /* Return a copy of the function's static chain. */
5854 static tree
5855 copy_static_chain (tree static_chain, copy_body_data * id)
5856 {
5857 tree *chain_copy, *pvar;
5858
5859 chain_copy = &static_chain;
5860 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5861 {
5862 tree new_tree = remap_decl (*pvar, id);
5863 lang_hooks.dup_lang_specific_decl (new_tree);
5864 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5865 *pvar = new_tree;
5866 }
5867 return static_chain;
5868 }
5869
5870 /* Return true if the function is allowed to be versioned.
5871 This is a guard for the versioning functionality. */
5872
5873 bool
5874 tree_versionable_function_p (tree fndecl)
5875 {
5876 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5877 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5878 }
5879
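/* Editorial sketch (hypothetical, not from the original file): callers are
   expected to consult tree_versionable_function_p before creating a new
   version.  make_clone_decl_somehow stands in for whatever produces the new
   FUNCTION_DECL and its cgraph node; both must exist before
   tree_function_versioning (declared in tree-inline.h) is invoked.  */

static bool
example_maybe_make_plain_version (tree fndecl)
{
  if (!tree_versionable_function_p (fndecl))
    /* The "noclone" attribute or copy_forbidden vetoed the copy.  */
    return false;

  /* Hypothetical helper: produce a new FUNCTION_DECL with a cgraph node.  */
  tree new_decl = make_clone_decl_somehow (fndecl);

  /* No replacements, no skipped args, keep the return value, copy all
     blocks, keep the original entry block.  */
  tree_function_versioning (fndecl, new_decl, NULL, false,
			    NULL, false, NULL, NULL);
  return true;
}
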
5880 /* Update clone info after duplication. */
5881
5882 static void
5883 update_clone_info (copy_body_data * id)
5884 {
5885 struct cgraph_node *node;
5886 if (!id->dst_node->clones)
5887 return;
5888 for (node = id->dst_node->clones; node != id->dst_node;)
5889 {
5890 /* First update replace maps to match the new body. */
5891 if (node->clone.tree_map)
5892 {
5893 unsigned int i;
5894 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5895 {
5896 struct ipa_replace_map *replace_info;
5897 replace_info = (*node->clone.tree_map)[i];
5898 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5899 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5900 }
5901 }
5902 if (node->clones)
5903 node = node->clones;
5904 else if (node->next_sibling_clone)
5905 node = node->next_sibling_clone;
5906 else
5907 {
5908 while (node != id->dst_node && !node->next_sibling_clone)
5909 node = node->clone_of;
5910 if (node != id->dst_node)
5911 node = node->next_sibling_clone;
5912 }
5913 }
5914 }
5915
5916 /* Create a copy of a function's tree.
5917 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5918 of the original function and the new copied function
5919 respectively. In case we want to replace a DECL
5920 tree with another tree while duplicating the function's
5921 body, TREE_MAP represents the mapping between these
5922 trees. If UPDATE_CLONES is set, the call_stmt fields
5923 of edges of clones of the function will be updated.
5924
5925    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5926    from the new version.
5927    If SKIP_RETURN is true, the new version will return void.
5928    If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5929    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5930 */
5931 void
5932 tree_function_versioning (tree old_decl, tree new_decl,
5933 vec<ipa_replace_map *, va_gc> *tree_map,
5934 bool update_clones, bitmap args_to_skip,
5935 bool skip_return, bitmap blocks_to_copy,
5936 basic_block new_entry)
5937 {
5938 struct cgraph_node *old_version_node;
5939 struct cgraph_node *new_version_node;
5940 copy_body_data id;
5941 tree p;
5942 unsigned i;
5943 struct ipa_replace_map *replace_info;
5944 basic_block old_entry_block, bb;
5945 auto_vec<gimple *, 10> init_stmts;
5946 tree vars = NULL_TREE;
5947 bitmap debug_args_to_skip = args_to_skip;
5948
5949 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5950 && TREE_CODE (new_decl) == FUNCTION_DECL);
5951 DECL_POSSIBLY_INLINED (old_decl) = 1;
5952
5953 old_version_node = cgraph_node::get (old_decl);
5954 gcc_checking_assert (old_version_node);
5955 new_version_node = cgraph_node::get (new_decl);
5956 gcc_checking_assert (new_version_node);
5957
5958 /* Copy over debug args. */
5959 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5960 {
5961 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5962 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5963 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5964 old_debug_args = decl_debug_args_lookup (old_decl);
5965 if (old_debug_args)
5966 {
5967 new_debug_args = decl_debug_args_insert (new_decl);
5968 *new_debug_args = vec_safe_copy (*old_debug_args);
5969 }
5970 }
5971
5972 /* Output the inlining info for this abstract function, since it has been
5973 inlined. If we don't do this now, we can lose the information about the
5974 variables in the function when the blocks get blown away as soon as we
5975 remove the cgraph node. */
5976 (*debug_hooks->outlining_inline_function) (old_decl);
5977
5978 DECL_ARTIFICIAL (new_decl) = 1;
5979 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5980 if (DECL_ORIGIN (old_decl) == old_decl)
5981 old_version_node->used_as_abstract_origin = true;
5982 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5983
5984 /* Prepare the data structures for the tree copy. */
5985 memset (&id, 0, sizeof (id));
5986
5987   /* Record the statements that will need folding after the copy.  */
5988 id.statements_to_fold = new hash_set<gimple *>;
5989
5990 id.decl_map = new hash_map<tree, tree>;
5991 id.debug_map = NULL;
5992 id.src_fn = old_decl;
5993 id.dst_fn = new_decl;
5994 id.src_node = old_version_node;
5995 id.dst_node = new_version_node;
5996 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5997 id.blocks_to_copy = blocks_to_copy;
5998
5999 id.copy_decl = copy_decl_no_change;
6000 id.transform_call_graph_edges
6001 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6002 id.transform_new_cfg = true;
6003 id.transform_return_to_modify = false;
6004 id.transform_parameter = false;
6005 id.transform_lang_insert_block = NULL;
6006
6007 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6008 (DECL_STRUCT_FUNCTION (old_decl));
6009 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6010 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6011 initialize_cfun (new_decl, old_decl,
6012 new_entry ? new_entry->count : old_entry_block->count);
6013 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6014 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6015 = id.src_cfun->gimple_df->ipa_pta;
6016
6017 /* Copy the function's static chain. */
6018 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6019 if (p)
6020 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6021 = copy_static_chain (p, &id);
6022
6023 /* If there's a tree_map, prepare for substitution. */
6024 if (tree_map)
6025 for (i = 0; i < tree_map->length (); i++)
6026 {
6027 gimple *init;
6028 replace_info = (*tree_map)[i];
6029 if (replace_info->replace_p)
6030 {
6031 int parm_num = -1;
6032 if (!replace_info->old_tree)
6033 {
6034 int p = replace_info->parm_num;
6035 tree parm;
6036 tree req_type, new_type;
6037
6038 for (parm = DECL_ARGUMENTS (old_decl); p;
6039 parm = DECL_CHAIN (parm))
6040 p--;
6041 replace_info->old_tree = parm;
6042 parm_num = replace_info->parm_num;
6043 req_type = TREE_TYPE (parm);
6044 new_type = TREE_TYPE (replace_info->new_tree);
6045 if (!useless_type_conversion_p (req_type, new_type))
6046 {
6047 if (fold_convertible_p (req_type, replace_info->new_tree))
6048 replace_info->new_tree
6049 = fold_build1 (NOP_EXPR, req_type,
6050 replace_info->new_tree);
6051 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6052 replace_info->new_tree
6053 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6054 replace_info->new_tree);
6055 else
6056 {
6057 if (dump_file)
6058 {
6059 fprintf (dump_file, " const ");
6060 print_generic_expr (dump_file,
6061 replace_info->new_tree);
6062 fprintf (dump_file,
6063 " can't be converted to param ");
6064 print_generic_expr (dump_file, parm);
6065 fprintf (dump_file, "\n");
6066 }
6067 replace_info->old_tree = NULL;
6068 }
6069 }
6070 }
6071 else
6072 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6073 if (replace_info->old_tree)
6074 {
6075 init = setup_one_parameter (&id, replace_info->old_tree,
6076 replace_info->new_tree, id.src_fn,
6077 NULL,
6078 &vars);
6079 if (init)
6080 init_stmts.safe_push (init);
6081 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6082 {
6083 if (parm_num == -1)
6084 {
6085 tree parm;
6086 int p;
6087 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6088 parm = DECL_CHAIN (parm), p++)
6089 if (parm == replace_info->old_tree)
6090 {
6091 parm_num = p;
6092 break;
6093 }
6094 }
6095 if (parm_num != -1)
6096 {
6097 if (debug_args_to_skip == args_to_skip)
6098 {
6099 debug_args_to_skip = BITMAP_ALLOC (NULL);
6100 bitmap_copy (debug_args_to_skip, args_to_skip);
6101 }
6102 bitmap_clear_bit (debug_args_to_skip, parm_num);
6103 }
6104 }
6105 }
6106 }
6107 }
6108 /* Copy the function's arguments. */
6109 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6110 DECL_ARGUMENTS (new_decl)
6111 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6112 args_to_skip, &vars);
6113
6114 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6115 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6116
6117 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6118
6119 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6120 /* Add local vars. */
6121 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6122
6123 if (DECL_RESULT (old_decl) == NULL_TREE)
6124 ;
6125 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6126 {
6127 DECL_RESULT (new_decl)
6128 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6129 RESULT_DECL, NULL_TREE, void_type_node);
6130 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6131 cfun->returns_struct = 0;
6132 cfun->returns_pcc_struct = 0;
6133 }
6134 else
6135 {
6136 tree old_name;
6137 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6138 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6139 if (gimple_in_ssa_p (id.src_cfun)
6140 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6141 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6142 {
6143 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6144 insert_decl_map (&id, old_name, new_name);
6145 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6146 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6147 }
6148 }
6149
6150   /* Set up the destination function's loop tree.  */
6151 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6152 {
6153 cfun->curr_properties &= ~PROP_loops;
6154 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6155 cfun->curr_properties |= PROP_loops;
6156 }
6157
6158   /* Copy the function's body.  */
6159 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6160 new_entry);
6161
6162 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6163 number_blocks (new_decl);
6164
6165 /* We want to create the BB unconditionally, so that the addition of
6166 debug stmts doesn't affect BB count, which may in the end cause
6167 codegen differences. */
6168 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6169 while (init_stmts.length ())
6170 insert_init_stmt (&id, bb, init_stmts.pop ());
6171 update_clone_info (&id);
6172
6173 /* Remap the nonlocal_goto_save_area, if any. */
6174 if (cfun->nonlocal_goto_save_area)
6175 {
6176 struct walk_stmt_info wi;
6177
6178 memset (&wi, 0, sizeof (wi));
6179 wi.info = &id;
6180 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6181 }
6182
6183 /* Clean up. */
6184 delete id.decl_map;
6185 if (id.debug_map)
6186 delete id.debug_map;
6187 free_dominance_info (CDI_DOMINATORS);
6188 free_dominance_info (CDI_POST_DOMINATORS);
6189
6190 update_max_bb_count ();
6191 fold_marked_statements (0, id.statements_to_fold);
6192 delete id.statements_to_fold;
6193 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6194 if (id.dst_node->definition)
6195 cgraph_edge::rebuild_references ();
6196 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6197 {
6198 calculate_dominance_info (CDI_DOMINATORS);
6199 fix_loop_structure (NULL);
6200 }
6201 update_ssa (TODO_update_ssa);
6202
6203   /* After partial cloning we need to rescale frequencies, so that they are
6204      within the proper range in the cloned function.  */
6205 if (new_entry)
6206 {
6207 struct cgraph_edge *e;
6208 rebuild_frequencies ();
6209
6210 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6211 for (e = new_version_node->callees; e; e = e->next_callee)
6212 {
6213 basic_block bb = gimple_bb (e->call_stmt);
6214 e->count = bb->count;
6215 }
6216 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6217 {
6218 basic_block bb = gimple_bb (e->call_stmt);
6219 e->count = bb->count;
6220 }
6221 }
6222
6223 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6224 {
6225 tree parm;
6226 vec<tree, va_gc> **debug_args = NULL;
6227 unsigned int len = 0;
6228 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6229 parm; parm = DECL_CHAIN (parm), i++)
6230 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6231 {
6232 tree ddecl;
6233
6234 if (debug_args == NULL)
6235 {
6236 debug_args = decl_debug_args_insert (new_decl);
6237 len = vec_safe_length (*debug_args);
6238 }
6239 ddecl = make_node (DEBUG_EXPR_DECL);
6240 DECL_ARTIFICIAL (ddecl) = 1;
6241 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6242 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6243 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6244 vec_safe_push (*debug_args, ddecl);
6245 }
6246 if (debug_args != NULL)
6247 {
6248 /* On the callee side, add
6249 DEBUG D#Y s=> parm
6250 DEBUG var => D#Y
6251          stmts to the first bb, where var is a VAR_DECL created for the
6252          optimized-away parameter in the DECL_INITIAL block.  This hints
6253          in the debug info that var (whose DECL_ORIGIN is the parm
6254          PARM_DECL) is optimized away, but its value can still be looked
6255          up at the call site as the value of D#X there.  */
6256 tree vexpr;
6257 gimple_stmt_iterator cgsi
6258 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6259 gimple *def_temp;
6260 tree var = vars;
6261 i = vec_safe_length (*debug_args);
6262 do
6263 {
6264 i -= 2;
6265 while (var != NULL_TREE
6266 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6267 var = TREE_CHAIN (var);
6268 if (var == NULL_TREE)
6269 break;
6270 vexpr = make_node (DEBUG_EXPR_DECL);
6271 parm = (**debug_args)[i];
6272 DECL_ARTIFICIAL (vexpr) = 1;
6273 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6274 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6275 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6276 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6277 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6278 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6279 }
6280 while (i > len);
6281 }
6282 }
6283
6284 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6285 BITMAP_FREE (debug_args_to_skip);
6286 free_dominance_info (CDI_DOMINATORS);
6287 free_dominance_info (CDI_POST_DOMINATORS);
6288
6289 gcc_assert (!id.debug_stmts.exists ());
6290 pop_cfun ();
6291 return;
6292 }
6293
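/* Editorial sketch (hypothetical, not taken from any IPA pass): building a
   single-entry TREE_MAP for tree_function_versioning that requests replacing
   parameter number PARM_NUM with CONSTANT_VALUE.  Only the fields that the
   code above reads (old_tree, new_tree, parm_num, replace_p) are filled in;
   ggc_cleared_alloc zero-initializes the rest.  */

static vec<ipa_replace_map *, va_gc> *
example_build_tree_map (int parm_num, tree constant_value)
{
  struct ipa_replace_map *map = ggc_cleared_alloc<ipa_replace_map> ();
  vec<ipa_replace_map *, va_gc> *tree_map = NULL;

  map->old_tree = NULL_TREE;	/* Let tree_function_versioning look the
				   PARM_DECL up from parm_num.  */
  map->new_tree = constant_value;
  map->parm_num = parm_num;
  map->replace_p = true;
  vec_safe_push (tree_map, map);
  return tree_map;
}
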
6294 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6295 the callee and return the inlined body on success. */
6296
6297 tree
6298 maybe_inline_call_in_expr (tree exp)
6299 {
6300 tree fn = get_callee_fndecl (exp);
6301
6302 /* We can only try to inline "const" functions. */
6303 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6304 {
6305 call_expr_arg_iterator iter;
6306 copy_body_data id;
6307 tree param, arg, t;
6308 hash_map<tree, tree> decl_map;
6309
6310 /* Remap the parameters. */
6311 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6312 param;
6313 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6314 decl_map.put (param, arg);
6315
6316 memset (&id, 0, sizeof (id));
6317 id.src_fn = fn;
6318 id.dst_fn = current_function_decl;
6319 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6320 id.decl_map = &decl_map;
6321
6322 id.copy_decl = copy_decl_no_change;
6323 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6324 id.transform_new_cfg = false;
6325 id.transform_return_to_modify = true;
6326 id.transform_parameter = true;
6327 id.transform_lang_insert_block = NULL;
6328
6329 /* Make sure not to unshare trees behind the front-end's back
6330 since front-end specific mechanisms may rely on sharing. */
6331 id.regimplify = false;
6332 id.do_not_unshare = true;
6333
6334 /* We're not inside any EH region. */
6335 id.eh_lp_nr = 0;
6336
6337 t = copy_tree_body (&id);
6338
6339 /* We can only return something suitable for use in a GENERIC
6340 expression tree. */
6341 if (TREE_CODE (t) == MODIFY_EXPR)
6342 return TREE_OPERAND (t, 1);
6343 }
6344
6345 return NULL_TREE;
6346 }
6347
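/* Editorial sketch (hypothetical caller, not part of the original file): a
   fold-time client of maybe_inline_call_in_expr.  If EXPR calls a "const"
   function whose GENERIC body is still available, the inlined body is
   substituted; otherwise the expression is returned unchanged.  */

static tree
example_fold_const_call (tree expr)
{
  if (TREE_CODE (expr) == CALL_EXPR)
    {
      tree inlined = maybe_inline_call_in_expr (expr);
      if (inlined)
	return inlined;
    }
  return expr;
}
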
6348 /* Duplicate a type, fields and all. */
6349
6350 tree
6351 build_duplicate_type (tree type)
6352 {
6353 struct copy_body_data id;
6354
6355 memset (&id, 0, sizeof (id));
6356 id.src_fn = current_function_decl;
6357 id.dst_fn = current_function_decl;
6358 id.src_cfun = cfun;
6359 id.decl_map = new hash_map<tree, tree>;
6360 id.debug_map = NULL;
6361 id.copy_decl = copy_decl_no_change;
6362
6363 type = remap_type_1 (type, &id);
6364
6365 delete id.decl_map;
6366 if (id.debug_map)
6367 delete id.debug_map;
6368
6369 TYPE_CANONICAL (type) = type;
6370
6371 return type;
6372 }
6373
6374 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6375 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6376 evaluation. */
6377
6378 tree
6379 copy_fn (tree fn, tree& parms, tree& result)
6380 {
6381 copy_body_data id;
6382 tree param;
6383 hash_map<tree, tree> decl_map;
6384
6385 tree *p = &parms;
6386 *p = NULL_TREE;
6387
6388 memset (&id, 0, sizeof (id));
6389 id.src_fn = fn;
6390 id.dst_fn = current_function_decl;
6391 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6392 id.decl_map = &decl_map;
6393
6394 id.copy_decl = copy_decl_no_change;
6395 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6396 id.transform_new_cfg = false;
6397 id.transform_return_to_modify = false;
6398 id.transform_parameter = true;
6399 id.transform_lang_insert_block = NULL;
6400
6401 /* Make sure not to unshare trees behind the front-end's back
6402 since front-end specific mechanisms may rely on sharing. */
6403 id.regimplify = false;
6404 id.do_not_unshare = true;
6405 id.do_not_fold = true;
6406
6407 /* We're not inside any EH region. */
6408 id.eh_lp_nr = 0;
6409
6410 /* Remap the parameters and result and return them to the caller. */
6411 for (param = DECL_ARGUMENTS (fn);
6412 param;
6413 param = DECL_CHAIN (param))
6414 {
6415 *p = remap_decl (param, &id);
6416 p = &DECL_CHAIN (*p);
6417 }
6418
6419 if (DECL_RESULT (fn))
6420 result = remap_decl (DECL_RESULT (fn), &id);
6421 else
6422 result = NULL_TREE;
6423
6424 return copy_tree_body (&id);
6425 }
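
/* Editorial sketch (hypothetical caller, loosely modelled on the C++
   constexpr evaluator mentioned above): copy_fn hands back an unshared copy
   of FN's saved tree together with the remapped PARM_DECL chain and
   RESULT_DECL, so the copy can be evaluated with fresh argument bindings
   without disturbing FN itself.  */

static void
example_take_body_copy (tree fn)
{
  tree parms = NULL_TREE;
  tree result = NULL_TREE;
  tree body = copy_fn (fn, parms, result);

  /* PARMS now chains the remapped PARM_DECLs, RESULT is the remapped
     RESULT_DECL (or NULL_TREE), and BODY refers only to those copies.  */
  (void) parms;
  (void) result;
  (void) body;
}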