1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
65
66 /* I'm not really happy about this, but we need to handle gimple and
67    non-gimple trees.  */
68
69 /* Inlining, Cloning, Versioning, Parallelization
70
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements are adjusted accordingly.
77
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
82
83    Versioning: a function body is duplicated and the result is a new
84    function, rather than being copied into blocks of an existing
85    function as with inlining.  Some parameters will become constants.
86
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
90
91    All of these will simultaneously look up any callgraph edges.  If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
98
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
100
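/* As an illustrative sketch (not taken from GCC itself; all names below
   are invented), the inlining transformation described above roughly turns

     static int inc (int x) { return x + 1; }
     int caller (int a) { return inc (a); }

   into a CALLER whose body contains a remapped copy of INC:

     int x.1;                  // PARM_DECL remapped to a VAR_DECL
     int retval.2;             // dedicated returned-value variable
     x.1 = a;
     retval.2 = x.1 + 1;       // the RETURN_EXPR became a MODIFY_EXPR
     return retval.2;

   Cloning and versioning perform the same kind of body duplication but
   emit a new function instead of splicing blocks into the caller.  */
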
101 /* To Do:
102
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
109
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
112
113
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
116
117 eni_weights eni_size_weights;
118
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
121
122 eni_weights eni_time_weights;
123
124 /* Prototypes. */
125
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_decl_to_var (tree, copy_body_data *);
134 static tree copy_result_decl_to_var (tree, copy_body_data *);
135 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
136 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138
139 /* Insert a tree->tree mapping for ID.  Although the name suggests
140    that the trees should be variables, it is used for more than that.  */
141
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145 id->decl_map->put (key, value);
146
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 id->decl_map->put (value, value);
151 }
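
/* For illustration (hypothetical decls): after

     insert_decl_map (id, old_parm, new_var);

   ID->DECL_MAP contains both OLD_PARM -> NEW_VAR and the identity entry
   NEW_VAR -> NEW_VAR, so running into the new node again will not
   duplicate it a second time.  */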
152
153 /* Insert a tree->tree mapping for ID. This is only used for
154 variables. */
155
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159 if (!gimple_in_ssa_p (id->src_cfun))
160 return;
161
162 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163 return;
164
165 if (!target_for_debug_bind (key))
166 return;
167
168 gcc_assert (TREE_CODE (key) == PARM_DECL);
169 gcc_assert (VAR_P (value));
170
171 if (!id->debug_map)
172 id->debug_map = new hash_map<tree, tree>;
173
174 id->debug_map->put (key, value);
175 }
176
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
180 context. */
181 static int processing_debug_stmt = 0;
182
183 /* Construct new SSA name for old NAME. ID is the inline context. */
184
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188 tree new_tree, var;
189 tree *n;
190
191 gcc_assert (TREE_CODE (name) == SSA_NAME);
192
193 n = id->decl_map->get (name);
194 if (n)
195 return unshare_expr (*n);
196
197 if (processing_debug_stmt)
198 {
199 if (SSA_NAME_IS_DEFAULT_DEF (name)
200 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 && id->entry_bb == NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 {
204 tree vexpr = make_node (DEBUG_EXPR_DECL);
205 gimple *def_temp;
206 gimple_stmt_iterator gsi;
207 tree val = SSA_NAME_VAR (name);
208
209 n = id->decl_map->get (val);
210 if (n != NULL)
211 val = *n;
212 if (TREE_CODE (val) != PARM_DECL
213 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
214 {
215 processing_debug_stmt = -1;
216 return name;
217 }
218 n = id->decl_map->get (val);
219 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
220 return *n;
221 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
222 DECL_ARTIFICIAL (vexpr) = 1;
223 TREE_TYPE (vexpr) = TREE_TYPE (name);
224 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
225 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
226 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
227 insert_decl_map (id, val, vexpr);
228 return vexpr;
229 }
230
231 processing_debug_stmt = -1;
232 return name;
233 }
234
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var = SSA_NAME_VAR (name);
237 if (!var
238 || (!SSA_NAME_IS_DEFAULT_DEF (name)
239 && VAR_P (var)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
241 && DECL_ARTIFICIAL (var)
242 && DECL_IGNORED_P (var)
243 && !DECL_NAME (var)))
244 {
245 struct ptr_info_def *pi;
246 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
247 if (!var && SSA_NAME_IDENTIFIER (name))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
249 insert_decl_map (id, name, new_tree);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id->src_cfun->gimple_df
254 && id->src_cfun->gimple_df->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name))
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
258 {
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
261 }
262 return new_tree;
263 }
264
265 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
266 in copy_bb. */
267 new_tree = remap_decl (var, id);
268
269   /* We might've substituted a constant or another SSA_NAME for
270      the variable.
271
272      Replace the SSA name representing the RESULT_DECL by the variable
273      during inlining: this saves us from the need to introduce a PHI node
274      in the case the return value is only partly initialized.  */
275 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
276 && (!SSA_NAME_VAR (name)
277 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
278 || !id->transform_return_to_modify))
279 {
280 struct ptr_info_def *pi;
281 new_tree = make_ssa_name (new_tree);
282 insert_decl_map (id, name, new_tree);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id->src_cfun->gimple_df
287 && id->src_cfun->gimple_df->ipa_pta
288 && POINTER_TYPE_P (TREE_TYPE (name))
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 {
297	  /* By inlining a function having an uninitialized variable, we might
298	     extend its lifetime (the variable might get reused).  This causes
299	     an ICE in the case we end up extending the lifetime of an SSA name
300	     across an abnormal edge, and it also increases register pressure.
301
302	     We simply initialize all uninitialized vars by 0, except for the
303	     case we are inlining into the very first BB.  We can avoid this
304	     for all BBs that are not inside strongly connected regions of
305	     the CFG, but this is expensive to test.  */
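	  /* Concretely (schematic; X_3 is an invented name): when the
	     uninitialized name occurs in an abnormal PHI, is not a parameter,
	     and ID->ENTRY_BB is not the caller's very first BB, the code
	     below appends

	       x_3 = 0;

	     for the freshly created copy to ID->ENTRY_BB; otherwise the copy
	     simply remains a default definition of its variable.  */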
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 {
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple *init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 }
322 else
323 {
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 }
327 }
328 }
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
332 }
333
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
335
336 tree
337 remap_decl (tree decl, copy_body_data *id)
338 {
339 tree *n;
340
341 /* We only remap local variables in the current function. */
342
343 /* See if we have remapped this declaration. */
344
345 n = id->decl_map->get (decl);
346
347 if (!n && processing_debug_stmt)
348 {
349 processing_debug_stmt = -1;
350 return decl;
351 }
352
353 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 necessary DECLs have already been remapped and we do not want to duplicate
355 a decl coming from outside of the sequence we are copying. */
356 if (!n
357 && id->prevent_decl_creation_for_types
358 && id->remapping_type_depth > 0
359 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
360 return decl;
361
362 /* If we didn't already have an equivalent for this declaration, create one
363 now. */
364 if (!n)
365 {
366 /* Make a copy of the variable or label. */
367 tree t = id->copy_decl (decl, id);
368
369 /* Remember it, so that if we encounter this local entity again
370 we can reuse this copy. Do this early because remap_type may
371 need this decl for TYPE_STUB_DECL. */
372 insert_decl_map (id, decl, t);
373
374 if (!DECL_P (t))
375 return t;
376
377 /* Remap types, if necessary. */
378 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
379 if (TREE_CODE (t) == TYPE_DECL)
380 {
381 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
382
383 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 is not set on the TYPE_DECL, for example in LTO mode. */
386 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
387 {
388 tree x = build_variant_type_copy (TREE_TYPE (t));
389 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
390 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
391 DECL_ORIGINAL_TYPE (t) = x;
392 }
393 }
394
395 /* Remap sizes as necessary. */
396 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
397 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
398
399 /* If fields, do likewise for offset and qualifier. */
400 if (TREE_CODE (t) == FIELD_DECL)
401 {
402 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
403 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
404 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
405 }
406
407 return t;
408 }
409
410 if (id->do_not_unshare)
411 return *n;
412 else
413 return unshare_expr (*n);
414 }
415
416 static tree
417 remap_type_1 (tree type, copy_body_data *id)
418 {
419 tree new_tree, t;
420
421   /* We do need a copy.  Build and register it now.  If this is a pointer or
422 reference type, remap the designated type and make a new pointer or
423 reference type. */
424 if (TREE_CODE (type) == POINTER_TYPE)
425 {
426 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
427 TYPE_MODE (type),
428 TYPE_REF_CAN_ALIAS_ALL (type));
429 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
430 new_tree = build_type_attribute_qual_variant (new_tree,
431 TYPE_ATTRIBUTES (type),
432 TYPE_QUALS (type));
433 insert_decl_map (id, type, new_tree);
434 return new_tree;
435 }
436 else if (TREE_CODE (type) == REFERENCE_TYPE)
437 {
438 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
439 TYPE_MODE (type),
440 TYPE_REF_CAN_ALIAS_ALL (type));
441 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
442 new_tree = build_type_attribute_qual_variant (new_tree,
443 TYPE_ATTRIBUTES (type),
444 TYPE_QUALS (type));
445 insert_decl_map (id, type, new_tree);
446 return new_tree;
447 }
448 else
449 new_tree = copy_node (type);
450
451 insert_decl_map (id, type, new_tree);
452
453 /* This is a new type, not a copy of an old type. Need to reassociate
454 variants. We can handle everything except the main variant lazily. */
455 t = TYPE_MAIN_VARIANT (type);
456 if (type != t)
457 {
458 t = remap_type (t, id);
459 TYPE_MAIN_VARIANT (new_tree) = t;
460 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
461 TYPE_NEXT_VARIANT (t) = new_tree;
462 }
463 else
464 {
465 TYPE_MAIN_VARIANT (new_tree) = new_tree;
466 TYPE_NEXT_VARIANT (new_tree) = NULL;
467 }
468
469 if (TYPE_STUB_DECL (type))
470 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
471
472 /* Lazily create pointer and reference types. */
473 TYPE_POINTER_TO (new_tree) = NULL;
474 TYPE_REFERENCE_TO (new_tree) = NULL;
475
476   /* Copy all types that may contain references to local variables; be sure
477      to preserve sharing between the type and its main variant when possible.  */
478 switch (TREE_CODE (new_tree))
479 {
480 case INTEGER_TYPE:
481 case REAL_TYPE:
482 case FIXED_POINT_TYPE:
483 case ENUMERAL_TYPE:
484 case BOOLEAN_TYPE:
485 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
486 {
487 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
488 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
489
490 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
492 }
493 else
494 {
495 t = TYPE_MIN_VALUE (new_tree);
496 if (t && TREE_CODE (t) != INTEGER_CST)
497 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
498
499 t = TYPE_MAX_VALUE (new_tree);
500 if (t && TREE_CODE (t) != INTEGER_CST)
501 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
502 }
503 return new_tree;
504
505 case FUNCTION_TYPE:
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
508 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
509 else
510 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
513 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
514 else
515 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
516 return new_tree;
517
518 case ARRAY_TYPE:
519 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
520 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
521 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
522 else
523 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
524
525 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
526 {
527 gcc_checking_assert (TYPE_DOMAIN (type)
528 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
529 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
530 }
531 else
532 {
533 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
534 /* For array bounds where we have decided not to copy over the bounds
535 variable which isn't used in OpenMP/OpenACC region, change them to
536 an uninitialized VAR_DECL temporary. */
537 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
538 && id->adjust_array_error_bounds
539 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
540 {
541 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
542 DECL_ATTRIBUTES (v)
543 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
544 DECL_ATTRIBUTES (v));
545 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
546 }
547 }
548 break;
549
550 case RECORD_TYPE:
551 case UNION_TYPE:
552 case QUAL_UNION_TYPE:
553 if (TYPE_MAIN_VARIANT (type) != type
554 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
555 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
556 else
557 {
558 tree f, nf = NULL;
559
560 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
561 {
562 t = remap_decl (f, id);
563 DECL_CONTEXT (t) = new_tree;
564 DECL_CHAIN (t) = nf;
565 nf = t;
566 }
567 TYPE_FIELDS (new_tree) = nreverse (nf);
568 }
569 break;
570
571 case OFFSET_TYPE:
572 default:
573 /* Shouldn't have been thought variable sized. */
574 gcc_unreachable ();
575 }
576
577   /* All variants of the type share the same size, so use the already remapped data.  */
578 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
579 {
580 tree s = TYPE_SIZE (type);
581 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
582 tree su = TYPE_SIZE_UNIT (type);
583 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
584 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
585 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
586 || s == mvs);
587 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
589 || su == mvsu);
590 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
591 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
592 }
593 else
594 {
595 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
596 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
597 }
598
599 return new_tree;
600 }
601
602 /* Helper function for remap_type_2, called through walk_tree. */
603
604 static tree
605 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
606 {
607 copy_body_data *id = (copy_body_data *) data;
608
609 if (TYPE_P (*tp))
610 *walk_subtrees = 0;
611
612 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
613 return *tp;
614
615 return NULL_TREE;
616 }
617
618 /* Return true if TYPE needs to be remapped because remap_decl on any
619 needed embedded decl returns something other than that decl. */
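/* For example (hypothetical): given a variable-length array type such as
   int[n] where N refers to a PARM_DECL that remap_decl maps to a new decl,
   this returns true and the type must be remapped; a type whose embedded
   decls all map to themselves can be reused unchanged.  */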
620
621 static bool
622 remap_type_2 (tree type, copy_body_data *id)
623 {
624 tree t;
625
626 #define RETURN_TRUE_IF_VAR(T) \
627 do \
628 { \
629 tree _t = (T); \
630 if (_t) \
631 { \
632 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
633 return true; \
634 if (!TYPE_SIZES_GIMPLIFIED (type) \
635 && walk_tree (&_t, remap_type_3, id, NULL)) \
636 return true; \
637 } \
638 } \
639 while (0)
640
641 switch (TREE_CODE (type))
642 {
643 case POINTER_TYPE:
644 case REFERENCE_TYPE:
645 case FUNCTION_TYPE:
646 case METHOD_TYPE:
647 return remap_type_2 (TREE_TYPE (type), id);
648
649 case INTEGER_TYPE:
650 case REAL_TYPE:
651 case FIXED_POINT_TYPE:
652 case ENUMERAL_TYPE:
653 case BOOLEAN_TYPE:
654 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
655 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
656 return false;
657
658 case ARRAY_TYPE:
659 if (remap_type_2 (TREE_TYPE (type), id)
660 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
661 return true;
662 break;
663
664 case RECORD_TYPE:
665 case UNION_TYPE:
666 case QUAL_UNION_TYPE:
667 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
668 if (TREE_CODE (t) == FIELD_DECL)
669 {
670 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
671 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
672 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
673 if (TREE_CODE (type) == QUAL_UNION_TYPE)
674 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
675 }
676 break;
677
678 default:
679 return false;
680 }
681
682 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
683 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
684 return false;
685 #undef RETURN_TRUE_IF_VAR
686 }
687
688 tree
689 remap_type (tree type, copy_body_data *id)
690 {
691 tree *node;
692 tree tmp;
693
694 if (type == NULL)
695 return type;
696
697 /* See if we have remapped this type. */
698 node = id->decl_map->get (type);
699 if (node)
700 return *node;
701
702 /* The type only needs remapping if it's variably modified. */
703 if (! variably_modified_type_p (type, id->src_fn)
704 /* Don't remap if copy_decl method doesn't always return a new
705 decl and for all embedded decls returns the passed in decl. */
706 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
707 {
708 insert_decl_map (id, type, type);
709 return type;
710 }
711
712 id->remapping_type_depth++;
713 tmp = remap_type_1 (type, id);
714 id->remapping_type_depth--;
715
716 return tmp;
717 }
718
719 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
720
721 static bool
722 can_be_nonlocal (tree decl, copy_body_data *id)
723 {
724 /* We cannot duplicate function decls. */
725 if (TREE_CODE (decl) == FUNCTION_DECL)
726 return true;
727
728 /* Local static vars must be non-local or we get multiple declaration
729 problems. */
730 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
731 return true;
732
733 return false;
734 }
735
736 static tree
737 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
738 copy_body_data *id)
739 {
740 tree old_var;
741 tree new_decls = NULL_TREE;
742
743 /* Remap its variables. */
744 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
745 {
746 tree new_var;
747
748 if (can_be_nonlocal (old_var, id))
749 {
750 /* We need to add this variable to the local decls as otherwise
751 nothing else will do so. */
752 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
753 add_local_decl (cfun, old_var);
754 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
755 && !DECL_IGNORED_P (old_var)
756 && nonlocalized_list)
757 vec_safe_push (*nonlocalized_list, old_var);
758 continue;
759 }
760
761 /* Remap the variable. */
762 new_var = remap_decl (old_var, id);
763
764 /* If we didn't remap this variable, we can't mess with its
765 TREE_CHAIN. If we remapped this variable to the return slot, it's
766 already declared somewhere else, so don't declare it here. */
767
768 if (new_var == id->retvar)
769 ;
770 else if (!new_var)
771 {
772 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
773 && !DECL_IGNORED_P (old_var)
774 && nonlocalized_list)
775 vec_safe_push (*nonlocalized_list, old_var);
776 }
777 else
778 {
779 gcc_assert (DECL_P (new_var));
780 DECL_CHAIN (new_var) = new_decls;
781 new_decls = new_var;
782
783 /* Also copy value-expressions. */
784 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
785 {
786 tree tem = DECL_VALUE_EXPR (new_var);
787 bool old_regimplify = id->regimplify;
788 id->remapping_type_depth++;
789 walk_tree (&tem, copy_tree_body_r, id, NULL);
790 id->remapping_type_depth--;
791 id->regimplify = old_regimplify;
792 SET_DECL_VALUE_EXPR (new_var, tem);
793 }
794 }
795 }
796
797 return nreverse (new_decls);
798 }
799
800 /* Copy the BLOCK to contain remapped versions of the variables
801 therein. And hook the new block into the block-tree. */
802
803 static void
804 remap_block (tree *block, copy_body_data *id)
805 {
806 tree old_block;
807 tree new_block;
808
809 /* Make the new block. */
810 old_block = *block;
811 new_block = make_node (BLOCK);
812 TREE_USED (new_block) = TREE_USED (old_block);
813 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
814 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
815 BLOCK_NONLOCALIZED_VARS (new_block)
816 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
817 *block = new_block;
818
819 /* Remap its variables. */
820 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
821 &BLOCK_NONLOCALIZED_VARS (new_block),
822 id);
823
824 if (id->transform_lang_insert_block)
825 id->transform_lang_insert_block (new_block);
826
827 /* Remember the remapped block. */
828 insert_decl_map (id, old_block, new_block);
829 }
830
831 /* Copy the whole block tree and root it in id->block. */
832
833 static tree
834 remap_blocks (tree block, copy_body_data *id)
835 {
836 tree t;
837 tree new_tree = block;
838
839 if (!block)
840 return NULL;
841
842 remap_block (&new_tree, id);
843 gcc_assert (new_tree != block);
844 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
845 prepend_lexical_block (new_tree, remap_blocks (t, id));
846   /* Blocks are in arbitrary order, but to make things slightly prettier,
847      do not swap their order when producing a copy.  */
848 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
849 return new_tree;
850 }
851
852 /* Remap the block tree rooted at BLOCK to nothing. */
853
854 static void
855 remap_blocks_to_null (tree block, copy_body_data *id)
856 {
857 tree t;
858 insert_decl_map (id, block, NULL_TREE);
859 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
860 remap_blocks_to_null (t, id);
861 }
862
863 /* Remap the location info pointed to by LOCUS. */
864
865 static location_t
866 remap_location (location_t locus, copy_body_data *id)
867 {
868 if (LOCATION_BLOCK (locus))
869 {
870 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
871 gcc_assert (n);
872 if (*n)
873 return set_block (locus, *n);
874 }
875
876 locus = LOCATION_LOCUS (locus);
877
878 if (locus != UNKNOWN_LOCATION && id->block)
879 return set_block (locus, id->block);
880
881 return locus;
882 }
883
884 static void
885 copy_statement_list (tree *tp)
886 {
887 tree_stmt_iterator oi, ni;
888 tree new_tree;
889
890 new_tree = alloc_stmt_list ();
891 ni = tsi_start (new_tree);
892 oi = tsi_start (*tp);
893 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
894 *tp = new_tree;
895
896 for (; !tsi_end_p (oi); tsi_next (&oi))
897 {
898 tree stmt = tsi_stmt (oi);
899 if (TREE_CODE (stmt) == STATEMENT_LIST)
900 /* This copy is not redundant; tsi_link_after will smash this
901 STATEMENT_LIST into the end of the one we're building, and we
902 don't want to do that with the original. */
903 copy_statement_list (&stmt);
904 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
905 }
906 }
907
908 static void
909 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
910 {
911 tree block = BIND_EXPR_BLOCK (*tp);
912 /* Copy (and replace) the statement. */
913 copy_tree_r (tp, walk_subtrees, NULL);
914 if (block)
915 {
916 remap_block (&block, id);
917 BIND_EXPR_BLOCK (*tp) = block;
918 }
919
920 if (BIND_EXPR_VARS (*tp))
921 /* This will remap a lot of the same decls again, but this should be
922 harmless. */
923 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
924 }
925
926
927 /* Create a new gimple_seq by remapping all the statements in BODY
928 using the inlining information in ID. */
929
930 static gimple_seq
931 remap_gimple_seq (gimple_seq body, copy_body_data *id)
932 {
933 gimple_stmt_iterator si;
934 gimple_seq new_body = NULL;
935
936 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
937 {
938 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
939 gimple_seq_add_seq (&new_body, new_stmts);
940 }
941
942 return new_body;
943 }
944
945
946 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
947 block using the mapping information in ID. */
948
949 static gimple *
950 copy_gimple_bind (gbind *stmt, copy_body_data *id)
951 {
952 gimple *new_bind;
953 tree new_block, new_vars;
954 gimple_seq body, new_body;
955
956 /* Copy the statement. Note that we purposely don't use copy_stmt
957 here because we need to remap statements as we copy. */
958 body = gimple_bind_body (stmt);
959 new_body = remap_gimple_seq (body, id);
960
961 new_block = gimple_bind_block (stmt);
962 if (new_block)
963 remap_block (&new_block, id);
964
965 /* This will remap a lot of the same decls again, but this should be
966 harmless. */
967 new_vars = gimple_bind_vars (stmt);
968 if (new_vars)
969 new_vars = remap_decls (new_vars, NULL, id);
970
971 new_bind = gimple_build_bind (new_vars, new_body, new_block);
972
973 return new_bind;
974 }
975
976 /* Return true if DECL is a parameter or an SSA_NAME for a parameter.  */
977
978 static bool
979 is_parm (tree decl)
980 {
981 if (TREE_CODE (decl) == SSA_NAME)
982 {
983 decl = SSA_NAME_VAR (decl);
984 if (!decl)
985 return false;
986 }
987
988 return (TREE_CODE (decl) == PARM_DECL);
989 }
990
991 /* Remap the dependence CLIQUE from the source to the destination function
992 as specified in ID. */
993
994 static unsigned short
995 remap_dependence_clique (copy_body_data *id, unsigned short clique)
996 {
997 if (clique == 0 || processing_debug_stmt)
998 return 0;
999 if (!id->dependence_map)
1000 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1001 bool existed;
1002 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1003 if (!existed)
1004 {
1005 /* Clique 1 is reserved for local ones set by PTA. */
1006 if (cfun->last_clique == 0)
1007 cfun->last_clique = 1;
1008 newc = ++cfun->last_clique;
1009 }
1010 return newc;
1011 }
1012
1013 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1014 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1015    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1016 recursing into the children nodes of *TP. */
1017
1018 static tree
1019 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1020 {
1021 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1022 copy_body_data *id = (copy_body_data *) wi_p->info;
1023 tree fn = id->src_fn;
1024
1025 /* For recursive invocations this is no longer the LHS itself. */
1026 bool is_lhs = wi_p->is_lhs;
1027 wi_p->is_lhs = false;
1028
1029 if (TREE_CODE (*tp) == SSA_NAME)
1030 {
1031 *tp = remap_ssa_name (*tp, id);
1032 *walk_subtrees = 0;
1033 if (is_lhs)
1034 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1035 return NULL;
1036 }
1037 else if (auto_var_in_fn_p (*tp, fn))
1038 {
1039 /* Local variables and labels need to be replaced by equivalent
1040 variables. We don't want to copy static variables; there's
1041 only one of those, no matter how many times we inline the
1042 containing function. Similarly for globals from an outer
1043 function. */
1044 tree new_decl;
1045
1046 /* Remap the declaration. */
1047 new_decl = remap_decl (*tp, id);
1048 gcc_assert (new_decl);
1049 /* Replace this variable with the copy. */
1050 STRIP_TYPE_NOPS (new_decl);
1051 /* ??? The C++ frontend uses void * pointer zero to initialize
1052 any other type. This confuses the middle-end type verification.
1053 As cloned bodies do not go through gimplification again the fixup
1054 there doesn't trigger. */
1055 if (TREE_CODE (new_decl) == INTEGER_CST
1056 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1057 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1058 *tp = new_decl;
1059 *walk_subtrees = 0;
1060 }
1061 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1062 gcc_unreachable ();
1063 else if (TREE_CODE (*tp) == SAVE_EXPR)
1064 gcc_unreachable ();
1065 else if (TREE_CODE (*tp) == LABEL_DECL
1066 && (!DECL_CONTEXT (*tp)
1067 || decl_function_context (*tp) == id->src_fn))
1068 /* These may need to be remapped for EH handling. */
1069 *tp = remap_decl (*tp, id);
1070 else if (TREE_CODE (*tp) == FIELD_DECL)
1071 {
1072 /* If the enclosing record type is variably_modified_type_p, the field
1073 has already been remapped. Otherwise, it need not be. */
1074 tree *n = id->decl_map->get (*tp);
1075 if (n)
1076 *tp = *n;
1077 *walk_subtrees = 0;
1078 }
1079 else if (TYPE_P (*tp))
1080 /* Types may need remapping as well. */
1081 *tp = remap_type (*tp, id);
1082 else if (CONSTANT_CLASS_P (*tp))
1083 {
1084 /* If this is a constant, we have to copy the node iff the type
1085 will be remapped. copy_tree_r will not copy a constant. */
1086 tree new_type = remap_type (TREE_TYPE (*tp), id);
1087
1088 if (new_type == TREE_TYPE (*tp))
1089 *walk_subtrees = 0;
1090
1091 else if (TREE_CODE (*tp) == INTEGER_CST)
1092 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1093 else
1094 {
1095 *tp = copy_node (*tp);
1096 TREE_TYPE (*tp) = new_type;
1097 }
1098 }
1099 else
1100 {
1101 /* Otherwise, just copy the node. Note that copy_tree_r already
1102 knows not to copy VAR_DECLs, etc., so this is safe. */
1103
1104 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1105 {
1106 /* We need to re-canonicalize MEM_REFs from inline substitutions
1107 that can happen when a pointer argument is an ADDR_EXPR.
1108 Recurse here manually to allow that. */
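	  /* Schematically (invented names): if the callee dereferences
	     parameter P_1 as MEM[(int *) p_1] and the call passed &A, the
	     walk below rewrites the base to &A and fold_build2 then gets a
	     chance to re-canonicalize the resulting MEM_REF (e.g. combining
	     constant offsets) instead of copying it verbatim.  */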
1109 tree ptr = TREE_OPERAND (*tp, 0);
1110 tree type = remap_type (TREE_TYPE (*tp), id);
1111 tree old = *tp;
1112 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1113 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1114 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1115 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1116 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1117 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1118 {
1119 MR_DEPENDENCE_CLIQUE (*tp)
1120 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1121 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1122 }
1123 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1124 remapped a parameter as the property might be valid only
1125 for the parameter itself. */
1126 if (TREE_THIS_NOTRAP (old)
1127 && (!is_parm (TREE_OPERAND (old, 0))
1128 || (!id->transform_parameter && is_parm (ptr))))
1129 TREE_THIS_NOTRAP (*tp) = 1;
1130 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1131 *walk_subtrees = 0;
1132 return NULL;
1133 }
1134
1135 /* Here is the "usual case". Copy this tree node, and then
1136 tweak some special cases. */
1137 copy_tree_r (tp, walk_subtrees, NULL);
1138
1139 if (TREE_CODE (*tp) != OMP_CLAUSE)
1140 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1141
1142 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1143 {
1144 /* The copied TARGET_EXPR has never been expanded, even if the
1145 original node was expanded already. */
1146 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1147 TREE_OPERAND (*tp, 3) = NULL_TREE;
1148 }
1149 else if (TREE_CODE (*tp) == ADDR_EXPR)
1150 {
1151 /* Variable substitution need not be simple. In particular,
1152 the MEM_REF substitution above. Make sure that
1153 TREE_CONSTANT and friends are up-to-date. */
1154 int invariant = is_gimple_min_invariant (*tp);
1155 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1156 recompute_tree_invariant_for_addr_expr (*tp);
1157
1158 /* If this used to be invariant, but is not any longer,
1159 then regimplification is probably needed. */
1160 if (invariant && !is_gimple_min_invariant (*tp))
1161 id->regimplify = true;
1162
1163 *walk_subtrees = 0;
1164 }
1165 }
1166
1167 /* Update the TREE_BLOCK for the cloned expr. */
1168 if (EXPR_P (*tp))
1169 {
1170 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1171 tree old_block = TREE_BLOCK (*tp);
1172 if (old_block)
1173 {
1174 tree *n;
1175 n = id->decl_map->get (TREE_BLOCK (*tp));
1176 if (n)
1177 new_block = *n;
1178 }
1179 TREE_SET_BLOCK (*tp, new_block);
1180 }
1181
1182 /* Keep iterating. */
1183 return NULL_TREE;
1184 }
1185
1186
1187 /* Called from copy_body via walk_tree.  DATA is really a
1188 `copy_body_data *'. */
1189
1190 tree
1191 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1192 {
1193 copy_body_data *id = (copy_body_data *) data;
1194 tree fn = id->src_fn;
1195 tree new_block;
1196
1197 /* Begin by recognizing trees that we'll completely rewrite for the
1198 inlining context. Our output for these trees is completely
1199    different from our input (e.g. RETURN_EXPR is deleted, and morphs
1200 into an edge). Further down, we'll handle trees that get
1201 duplicated and/or tweaked. */
1202
1203 /* When requested, RETURN_EXPRs should be transformed to just the
1204 contained MODIFY_EXPR. The branch semantics of the return will
1205 be handled elsewhere by manipulating the CFG rather than a statement. */
1206 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1207 {
1208 tree assignment = TREE_OPERAND (*tp, 0);
1209
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If the "assignment" is just the result decl, the result
1213 decl has already been set (e.g. a recent "foo (&result_decl,
1214 ...)"); just toss the entire RETURN_EXPR. */
1215 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1216 {
1217 /* Replace the RETURN_EXPR with (a copy of) the
1218 MODIFY_EXPR hanging underneath. */
1219 *tp = copy_node (assignment);
1220 }
1221 else /* Else the RETURN_EXPR returns no value. */
1222 {
1223 *tp = NULL;
1224 return (tree) (void *)1;
1225 }
1226 }
1227 else if (TREE_CODE (*tp) == SSA_NAME)
1228 {
1229 *tp = remap_ssa_name (*tp, id);
1230 *walk_subtrees = 0;
1231 return NULL;
1232 }
1233
1234 /* Local variables and labels need to be replaced by equivalent
1235 variables. We don't want to copy static variables; there's only
1236 one of those, no matter how many times we inline the containing
1237 function. Similarly for globals from an outer function. */
1238 else if (auto_var_in_fn_p (*tp, fn))
1239 {
1240 tree new_decl;
1241
1242 /* Remap the declaration. */
1243 new_decl = remap_decl (*tp, id);
1244 gcc_assert (new_decl);
1245 /* Replace this variable with the copy. */
1246 STRIP_TYPE_NOPS (new_decl);
1247 *tp = new_decl;
1248 *walk_subtrees = 0;
1249 }
1250 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1251 copy_statement_list (tp);
1252 else if (TREE_CODE (*tp) == SAVE_EXPR
1253 || TREE_CODE (*tp) == TARGET_EXPR)
1254 remap_save_expr (tp, id->decl_map, walk_subtrees);
1255 else if (TREE_CODE (*tp) == LABEL_DECL
1256 && (! DECL_CONTEXT (*tp)
1257 || decl_function_context (*tp) == id->src_fn))
1258 /* These may need to be remapped for EH handling. */
1259 *tp = remap_decl (*tp, id);
1260 else if (TREE_CODE (*tp) == BIND_EXPR)
1261 copy_bind_expr (tp, walk_subtrees, id);
1262 /* Types may need remapping as well. */
1263 else if (TYPE_P (*tp))
1264 *tp = remap_type (*tp, id);
1265
1266 /* If this is a constant, we have to copy the node iff the type will be
1267 remapped. copy_tree_r will not copy a constant. */
1268 else if (CONSTANT_CLASS_P (*tp))
1269 {
1270 tree new_type = remap_type (TREE_TYPE (*tp), id);
1271
1272 if (new_type == TREE_TYPE (*tp))
1273 *walk_subtrees = 0;
1274
1275 else if (TREE_CODE (*tp) == INTEGER_CST)
1276 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1277 else
1278 {
1279 *tp = copy_node (*tp);
1280 TREE_TYPE (*tp) = new_type;
1281 }
1282 }
1283
1284 /* Otherwise, just copy the node. Note that copy_tree_r already
1285 knows not to copy VAR_DECLs, etc., so this is safe. */
1286 else
1287 {
1288 /* Here we handle trees that are not completely rewritten.
1289 First we detect some inlining-induced bogosities for
1290 discarding. */
1291 if (TREE_CODE (*tp) == MODIFY_EXPR
1292 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1293 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1294 {
1295 /* Some assignments VAR = VAR; don't generate any rtl code
1296 and thus don't count as variable modification. Avoid
1297 keeping bogosities like 0 = 0. */
1298 tree decl = TREE_OPERAND (*tp, 0), value;
1299 tree *n;
1300
1301 n = id->decl_map->get (decl);
1302 if (n)
1303 {
1304 value = *n;
1305 STRIP_TYPE_NOPS (value);
1306 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1307 {
1308 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1309 return copy_tree_body_r (tp, walk_subtrees, data);
1310 }
1311 }
1312 }
1313 else if (TREE_CODE (*tp) == INDIRECT_REF)
1314 {
1315 /* Get rid of *& from inline substitutions that can happen when a
1316 pointer argument is an ADDR_EXPR. */
1317 tree decl = TREE_OPERAND (*tp, 0);
1318 tree *n = id->decl_map->get (decl);
1319 if (n)
1320 {
1321 /* If we happen to get an ADDR_EXPR in n->value, strip
1322 it manually here as we'll eventually get ADDR_EXPRs
1323 which lie about their types pointed to. In this case
1324 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1325 but we absolutely rely on that. As fold_indirect_ref
1326 does other useful transformations, try that first, though. */
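	      /* Schematic example (invented names): for a call f (&a) whose
		 callee dereferences parameter P as *P, the mapping yields
		 *&A here; the folding below reduces that to plain A instead
		 of leaking a *& expression into the IL.  */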
1327 tree type = TREE_TYPE (*tp);
1328 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1329 tree old = *tp;
1330 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1331 if (! *tp)
1332 {
1333 type = remap_type (type, id);
1334 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1335 {
1336 *tp
1337 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1338 /* ??? We should either assert here or build
1339 a VIEW_CONVERT_EXPR instead of blindly leaking
1340 incompatible types to our IL. */
1341 if (! *tp)
1342 *tp = TREE_OPERAND (ptr, 0);
1343 }
1344 else
1345 {
1346 *tp = build1 (INDIRECT_REF, type, ptr);
1347 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1348 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1349 TREE_READONLY (*tp) = TREE_READONLY (old);
1350 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1351 have remapped a parameter as the property might be
1352 valid only for the parameter itself. */
1353 if (TREE_THIS_NOTRAP (old)
1354 && (!is_parm (TREE_OPERAND (old, 0))
1355 || (!id->transform_parameter && is_parm (ptr))))
1356 TREE_THIS_NOTRAP (*tp) = 1;
1357 }
1358 }
1359 *walk_subtrees = 0;
1360 return NULL;
1361 }
1362 }
1363 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1364 {
1365 /* We need to re-canonicalize MEM_REFs from inline substitutions
1366 that can happen when a pointer argument is an ADDR_EXPR.
1367 Recurse here manually to allow that. */
1368 tree ptr = TREE_OPERAND (*tp, 0);
1369 tree type = remap_type (TREE_TYPE (*tp), id);
1370 tree old = *tp;
1371 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1372 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1373 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1374 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1375 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1376 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1377 {
1378 MR_DEPENDENCE_CLIQUE (*tp)
1379 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1380 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1381 }
1382 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1383 remapped a parameter as the property might be valid only
1384 for the parameter itself. */
1385 if (TREE_THIS_NOTRAP (old)
1386 && (!is_parm (TREE_OPERAND (old, 0))
1387 || (!id->transform_parameter && is_parm (ptr))))
1388 TREE_THIS_NOTRAP (*tp) = 1;
1389 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1390 *walk_subtrees = 0;
1391 return NULL;
1392 }
1393
1394 /* Here is the "usual case". Copy this tree node, and then
1395 tweak some special cases. */
1396 copy_tree_r (tp, walk_subtrees, NULL);
1397
1398       /* If EXPR has a block defined, map it to the newly constructed
1399	  block.  When inlining we want EXPRs without a block to appear in
1400	  the block of the function call if we are not remapping a type.  */
1401 if (EXPR_P (*tp))
1402 {
1403 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1404 if (TREE_BLOCK (*tp))
1405 {
1406 tree *n;
1407 n = id->decl_map->get (TREE_BLOCK (*tp));
1408 if (n)
1409 new_block = *n;
1410 }
1411 TREE_SET_BLOCK (*tp, new_block);
1412 }
1413
1414 if (TREE_CODE (*tp) != OMP_CLAUSE)
1415 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1416
1417 /* The copied TARGET_EXPR has never been expanded, even if the
1418 original node was expanded already. */
1419 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1420 {
1421 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1422 TREE_OPERAND (*tp, 3) = NULL_TREE;
1423 }
1424
1425 /* Variable substitution need not be simple. In particular, the
1426 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1427 and friends are up-to-date. */
1428 else if (TREE_CODE (*tp) == ADDR_EXPR)
1429 {
1430 int invariant = is_gimple_min_invariant (*tp);
1431 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1432
1433 /* Handle the case where we substituted an INDIRECT_REF
1434 into the operand of the ADDR_EXPR. */
1435 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1436 && !id->do_not_fold)
1437 {
1438 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1439 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1440 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1441 *tp = t;
1442 }
1443 else
1444 recompute_tree_invariant_for_addr_expr (*tp);
1445
1446 /* If this used to be invariant, but is not any longer,
1447 then regimplification is probably needed. */
1448 if (invariant && !is_gimple_min_invariant (*tp))
1449 id->regimplify = true;
1450
1451 *walk_subtrees = 0;
1452 }
1453 }
1454
1455 /* Keep iterating. */
1456 return NULL_TREE;
1457 }
1458
1459 /* Helper for remap_gimple_stmt. Given an EH region number for the
1460 source function, map that to the duplicate EH region number in
1461 the destination function. */
1462
1463 static int
1464 remap_eh_region_nr (int old_nr, copy_body_data *id)
1465 {
1466 eh_region old_r, new_r;
1467
1468 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1469 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1470
1471 return new_r->index;
1472 }
1473
1474 /* Similar, but operate on INTEGER_CSTs. */
1475
1476 static tree
1477 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1478 {
1479 int old_nr, new_nr;
1480
1481 old_nr = tree_to_shwi (old_t_nr);
1482 new_nr = remap_eh_region_nr (old_nr, id);
1483
1484 return build_int_cst (integer_type_node, new_nr);
1485 }
1486
1487 /* Helper for copy_bb. Remap statement STMT using the inlining
1488 information in ID. Return the new statement copy. */
1489
1490 static gimple_seq
1491 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1492 {
1493 gimple *copy = NULL;
1494 struct walk_stmt_info wi;
1495 bool skip_first = false;
1496 gimple_seq stmts = NULL;
1497
1498 if (is_gimple_debug (stmt)
1499 && (gimple_debug_nonbind_marker_p (stmt)
1500 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1501 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1502 return NULL;
1503
1504 /* Begin by recognizing trees that we'll completely rewrite for the
1505 inlining context. Our output for these trees is completely
1506 different from our input (e.g. RETURN_EXPR is deleted and morphs
1507 into an edge). Further down, we'll handle trees that get
1508 duplicated and/or tweaked. */
1509
1510 /* When requested, GIMPLE_RETURN should be transformed to just the
1511 contained GIMPLE_ASSIGN. The branch semantics of the return will
1512 be handled elsewhere by manipulating the CFG rather than the
1513 statement. */
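  /* Illustration (schematic GIMPLE; _5 and RETVAR are invented names):
     a callee statement

       return _5;

     is remapped to

       retvar = _5;

     with RETVAR being ID->RETVAR; the jump to the point after the call is
     produced later by CFG manipulation, not by this statement.  */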
1514 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1515 {
1516 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1517
1518 /* If we're returning something, just turn that into an
1519 assignment to the equivalent of the original RESULT_DECL.
1520 If RETVAL is just the result decl, the result decl has
1521 already been set (e.g. a recent "foo (&result_decl, ...)");
1522 just toss the entire GIMPLE_RETURN. */
1523 if (retval
1524 && (TREE_CODE (retval) != RESULT_DECL
1525 && (TREE_CODE (retval) != SSA_NAME
1526 || ! SSA_NAME_VAR (retval)
1527 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1528 {
1529 copy = gimple_build_assign (id->do_not_unshare
1530 ? id->retvar : unshare_expr (id->retvar),
1531 retval);
1532 /* id->retvar is already substituted. Skip it on later remapping. */
1533 skip_first = true;
1534 }
1535 else
1536 return NULL;
1537 }
1538 else if (gimple_has_substatements (stmt))
1539 {
1540 gimple_seq s1, s2;
1541
1542 /* When cloning bodies from the C++ front end, we will be handed bodies
1543 in High GIMPLE form. Handle here all the High GIMPLE statements that
1544 have embedded statements. */
1545 switch (gimple_code (stmt))
1546 {
1547 case GIMPLE_BIND:
1548 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1549 break;
1550
1551 case GIMPLE_CATCH:
1552 {
1553 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1554 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1555 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1556 }
1557 break;
1558
1559 case GIMPLE_EH_FILTER:
1560 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1561 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1562 break;
1563
1564 case GIMPLE_TRY:
1565 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1566 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1567 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1568 break;
1569
1570 case GIMPLE_WITH_CLEANUP_EXPR:
1571 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1572 copy = gimple_build_wce (s1);
1573 break;
1574
1575 case GIMPLE_OMP_PARALLEL:
1576 {
1577 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1578 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1579 copy = gimple_build_omp_parallel
1580 (s1,
1581 gimple_omp_parallel_clauses (omp_par_stmt),
1582 gimple_omp_parallel_child_fn (omp_par_stmt),
1583 gimple_omp_parallel_data_arg (omp_par_stmt));
1584 }
1585 break;
1586
1587 case GIMPLE_OMP_TASK:
1588 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1589 copy = gimple_build_omp_task
1590 (s1,
1591 gimple_omp_task_clauses (stmt),
1592 gimple_omp_task_child_fn (stmt),
1593 gimple_omp_task_data_arg (stmt),
1594 gimple_omp_task_copy_fn (stmt),
1595 gimple_omp_task_arg_size (stmt),
1596 gimple_omp_task_arg_align (stmt));
1597 break;
1598
1599 case GIMPLE_OMP_FOR:
1600 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1601 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1602 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1603 gimple_omp_for_clauses (stmt),
1604 gimple_omp_for_collapse (stmt), s2);
1605 {
1606 size_t i;
1607 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1608 {
1609 gimple_omp_for_set_index (copy, i,
1610 gimple_omp_for_index (stmt, i));
1611 gimple_omp_for_set_initial (copy, i,
1612 gimple_omp_for_initial (stmt, i));
1613 gimple_omp_for_set_final (copy, i,
1614 gimple_omp_for_final (stmt, i));
1615 gimple_omp_for_set_incr (copy, i,
1616 gimple_omp_for_incr (stmt, i));
1617 gimple_omp_for_set_cond (copy, i,
1618 gimple_omp_for_cond (stmt, i));
1619 }
1620 }
1621 break;
1622
1623 case GIMPLE_OMP_MASTER:
1624 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1625 copy = gimple_build_omp_master (s1);
1626 break;
1627
1628 case GIMPLE_OMP_TASKGROUP:
1629 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1630 copy = gimple_build_omp_taskgroup
1631 (s1, gimple_omp_taskgroup_clauses (stmt));
1632 break;
1633
1634 case GIMPLE_OMP_ORDERED:
1635 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1636 copy = gimple_build_omp_ordered
1637 (s1,
1638 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1639 break;
1640
1641 case GIMPLE_OMP_SCAN:
1642 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1643 copy = gimple_build_omp_scan
1644 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1645 break;
1646
1647 case GIMPLE_OMP_SECTION:
1648 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1649 copy = gimple_build_omp_section (s1);
1650 break;
1651
1652 case GIMPLE_OMP_SECTIONS:
1653 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1654 copy = gimple_build_omp_sections
1655 (s1, gimple_omp_sections_clauses (stmt));
1656 break;
1657
1658 case GIMPLE_OMP_SINGLE:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_single
1661 (s1, gimple_omp_single_clauses (stmt));
1662 break;
1663
1664 case GIMPLE_OMP_TARGET:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_target
1667 (s1, gimple_omp_target_kind (stmt),
1668 gimple_omp_target_clauses (stmt));
1669 break;
1670
1671 case GIMPLE_OMP_TEAMS:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_teams
1674 (s1, gimple_omp_teams_clauses (stmt));
1675 break;
1676
1677 case GIMPLE_OMP_CRITICAL:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_critical (s1,
1680 gimple_omp_critical_name
1681 (as_a <gomp_critical *> (stmt)),
1682 gimple_omp_critical_clauses
1683 (as_a <gomp_critical *> (stmt)));
1684 break;
1685
1686 case GIMPLE_TRANSACTION:
1687 {
1688 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1689 gtransaction *new_trans_stmt;
1690 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1691 id);
1692 copy = new_trans_stmt = gimple_build_transaction (s1);
1693 gimple_transaction_set_subcode (new_trans_stmt,
1694 gimple_transaction_subcode (old_trans_stmt));
1695 gimple_transaction_set_label_norm (new_trans_stmt,
1696 gimple_transaction_label_norm (old_trans_stmt));
1697 gimple_transaction_set_label_uninst (new_trans_stmt,
1698 gimple_transaction_label_uninst (old_trans_stmt));
1699 gimple_transaction_set_label_over (new_trans_stmt,
1700 gimple_transaction_label_over (old_trans_stmt));
1701 }
1702 break;
1703
1704 default:
1705 gcc_unreachable ();
1706 }
1707 }
1708 else
1709 {
1710 if (gimple_assign_copy_p (stmt)
1711 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1712 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1713 {
1714 /* Here we handle statements that are not completely rewritten.
1715 First we detect some inlining-induced bogosities for
1716 discarding. */
1717
1718 /* Some assignments VAR = VAR; don't generate any rtl code
1719 and thus don't count as variable modification. Avoid
1720 keeping bogosities like 0 = 0. */
1721 tree decl = gimple_assign_lhs (stmt), value;
1722 tree *n;
1723
1724 n = id->decl_map->get (decl);
1725 if (n)
1726 {
1727 value = *n;
1728 STRIP_TYPE_NOPS (value);
1729 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1730 return NULL;
1731 }
1732 }
1733
1734       /* For *ptr_N ={v} {CLOBBER}, if ptr_N is an SSA_NAME defined
1735 in a block that we aren't copying during tree_function_versioning,
1736 just drop the clobber stmt. */
1737 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1738 {
1739 tree lhs = gimple_assign_lhs (stmt);
1740 if (TREE_CODE (lhs) == MEM_REF
1741 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1742 {
1743 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1744 if (gimple_bb (def_stmt)
1745 && !bitmap_bit_p (id->blocks_to_copy,
1746 gimple_bb (def_stmt)->index))
1747 return NULL;
1748 }
1749 }
1750
1751 if (gimple_debug_bind_p (stmt))
1752 {
1753 gdebug *copy
1754 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1755 gimple_debug_bind_get_value (stmt),
1756 stmt);
1757 if (id->reset_location)
1758 gimple_set_location (copy, input_location);
1759 id->debug_stmts.safe_push (copy);
1760 gimple_seq_add_stmt (&stmts, copy);
1761 return stmts;
1762 }
1763 if (gimple_debug_source_bind_p (stmt))
1764 {
1765 gdebug *copy = gimple_build_debug_source_bind
1766 (gimple_debug_source_bind_get_var (stmt),
1767 gimple_debug_source_bind_get_value (stmt),
1768 stmt);
1769 if (id->reset_location)
1770 gimple_set_location (copy, input_location);
1771 id->debug_stmts.safe_push (copy);
1772 gimple_seq_add_stmt (&stmts, copy);
1773 return stmts;
1774 }
1775 if (gimple_debug_nonbind_marker_p (stmt))
1776 {
1777 /* If the inlined function has too many debug markers,
1778 don't copy them. */
1779 if (id->src_cfun->debug_marker_count
1780 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1781 return stmts;
1782
1783 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1784 if (id->reset_location)
1785 gimple_set_location (copy, input_location);
1786 id->debug_stmts.safe_push (copy);
1787 gimple_seq_add_stmt (&stmts, copy);
1788 return stmts;
1789 }
1790
1791 /* Create a new deep copy of the statement. */
1792 copy = gimple_copy (stmt);
1793
1794 /* Clear flags that need revisiting. */
1795 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1796 {
1797 if (gimple_call_tail_p (call_stmt))
1798 gimple_call_set_tail (call_stmt, false);
1799 if (gimple_call_from_thunk_p (call_stmt))
1800 gimple_call_set_from_thunk (call_stmt, false);
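/* Copying a call to one of the GOMP_SIMD_* internal functions means
   the destination function now contains simduid loops; record that. */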
1801 if (gimple_call_internal_p (call_stmt))
1802 switch (gimple_call_internal_fn (call_stmt))
1803 {
1804 case IFN_GOMP_SIMD_LANE:
1805 case IFN_GOMP_SIMD_VF:
1806 case IFN_GOMP_SIMD_LAST_LANE:
1807 case IFN_GOMP_SIMD_ORDERED_START:
1808 case IFN_GOMP_SIMD_ORDERED_END:
1809 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1810 break;
1811 default:
1812 break;
1813 }
1814 }
1815
1816 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1817 RESX and EH_DISPATCH. */
1818 if (id->eh_map)
1819 switch (gimple_code (copy))
1820 {
1821 case GIMPLE_CALL:
1822 {
1823 tree r, fndecl = gimple_call_fndecl (copy);
1824 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1825 switch (DECL_FUNCTION_CODE (fndecl))
1826 {
1827 case BUILT_IN_EH_COPY_VALUES:
1828 r = gimple_call_arg (copy, 1);
1829 r = remap_eh_region_tree_nr (r, id);
1830 gimple_call_set_arg (copy, 1, r);
1831 /* FALLTHRU */
1832
1833 case BUILT_IN_EH_POINTER:
1834 case BUILT_IN_EH_FILTER:
1835 r = gimple_call_arg (copy, 0);
1836 r = remap_eh_region_tree_nr (r, id);
1837 gimple_call_set_arg (copy, 0, r);
1838 break;
1839
1840 default:
1841 break;
1842 }
1843
1844 /* Reset alias info if we didn't apply measures to
1845 keep it valid over inlining by setting DECL_PT_UID. */
1846 if (!id->src_cfun->gimple_df
1847 || !id->src_cfun->gimple_df->ipa_pta)
1848 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1849 }
1850 break;
1851
1852 case GIMPLE_RESX:
1853 {
1854 gresx *resx_stmt = as_a <gresx *> (copy);
1855 int r = gimple_resx_region (resx_stmt);
1856 r = remap_eh_region_nr (r, id);
1857 gimple_resx_set_region (resx_stmt, r);
1858 }
1859 break;
1860
1861 case GIMPLE_EH_DISPATCH:
1862 {
1863 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1864 int r = gimple_eh_dispatch_region (eh_dispatch);
1865 r = remap_eh_region_nr (r, id);
1866 gimple_eh_dispatch_set_region (eh_dispatch, r);
1867 }
1868 break;
1869
1870 default:
1871 break;
1872 }
1873 }
1874
1875 /* If STMT has a block defined, map it to the newly constructed block. */
1876 if (tree block = gimple_block (copy))
1877 {
1878 tree *n;
1879 n = id->decl_map->get (block);
1880 gcc_assert (n);
1881 gimple_set_block (copy, *n);
1882 }
1883
1884 if (id->reset_location)
1885 gimple_set_location (copy, input_location);
1886
1887 /* Debug statements ought to be rebuilt and not copied. */
1888 gcc_checking_assert (!is_gimple_debug (copy));
1889
1890 /* Remap all the operands in COPY. */
1891 memset (&wi, 0, sizeof (wi));
1892 wi.info = id;
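/* If SKIP_FIRST is set, operand 0 of COPY was already set up earlier
   (e.g. the assignment to the return variable built for a GIMPLE_RETURN),
   so only the value operand is remapped here. */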
1893 if (skip_first)
1894 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1895 else
1896 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1897
1898 /* Clear the copied virtual operands. We are not remapping them here
1899 but are going to recreate them from scratch. */
1900 if (gimple_has_mem_ops (copy))
1901 {
1902 gimple_set_vdef (copy, NULL_TREE);
1903 gimple_set_vuse (copy, NULL_TREE);
1904 }
1905
1906 gimple_seq_add_stmt (&stmts, copy);
1907 return stmts;
1908 }
1909
1910
1911 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1912 later. */
1913
1914 static basic_block
1915 copy_bb (copy_body_data *id, basic_block bb,
1916 profile_count num, profile_count den)
1917 {
1918 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1919 basic_block copy_basic_block;
1920 tree decl;
1921 basic_block prev;
1922
1923 profile_count::adjust_for_ipa_scaling (&num, &den);
1924
1925 /* Search for previous copied basic block. */
1926 prev = bb->prev_bb;
1927 while (!prev->aux)
1928 prev = prev->prev_bb;
1929
1930 /* create_basic_block() will append every new block to
1931 basic_block_info automatically. */
1932 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1933 copy_basic_block->count = bb->count.apply_scale (num, den);
1934
1935 copy_gsi = gsi_start_bb (copy_basic_block);
1936
1937 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1938 {
1939 gimple_seq stmts;
1940 gimple *stmt = gsi_stmt (gsi);
1941 gimple *orig_stmt = stmt;
1942 gimple_stmt_iterator stmts_gsi;
1943 bool stmt_added = false;
1944
1945 id->regimplify = false;
1946 stmts = remap_gimple_stmt (stmt, id);
1947
1948 if (gimple_seq_empty_p (stmts))
1949 continue;
1950
1951 seq_gsi = copy_gsi;
1952
1953 for (stmts_gsi = gsi_start (stmts);
1954 !gsi_end_p (stmts_gsi); )
1955 {
1956 stmt = gsi_stmt (stmts_gsi);
1957
1958 /* Advance iterator now before stmt is moved to seq_gsi. */
1959 gsi_next (&stmts_gsi);
1960
1961 if (gimple_nop_p (stmt))
1962 continue;
1963
1964 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1965 orig_stmt);
1966
1967 /* With return slot optimization we can end up with
1968 non-gimple (foo *)&this->m; fix that here. */
1969 if (is_gimple_assign (stmt)
1970 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1971 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1972 {
1973 tree new_rhs;
1974 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1975 gimple_assign_rhs1 (stmt),
1976 true, NULL, false,
1977 GSI_CONTINUE_LINKING);
1978 gimple_assign_set_rhs1 (stmt, new_rhs);
1979 id->regimplify = false;
1980 }
1981
1982 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1983
1984 if (id->regimplify)
1985 gimple_regimplify_operands (stmt, &seq_gsi);
1986
1987 stmt_added = true;
1988 }
1989
1990 if (!stmt_added)
1991 continue;
1992
1993 /* If copy_basic_block was empty at the start of this iteration,
1994 call gsi_start_bb again to get at the newly added statements. */
1995 if (gsi_end_p (copy_gsi))
1996 copy_gsi = gsi_start_bb (copy_basic_block);
1997 else
1998 gsi_next (&copy_gsi);
1999
2000 /* Process the new statement. The call to gimple_regimplify_operands
2001 possibly turned the statement into multiple statements; we
2002 need to process all of them. */
2003 do
2004 {
2005 tree fn;
2006 gcall *call_stmt;
2007
2008 stmt = gsi_stmt (copy_gsi);
2009 call_stmt = dyn_cast <gcall *> (stmt);
2010 if (call_stmt
2011 && gimple_call_va_arg_pack_p (call_stmt)
2012 && id->call_stmt
2013 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2014 {
2015 /* __builtin_va_arg_pack () should be replaced by
2016 all arguments corresponding to ... in the caller. */
2017 tree p;
2018 gcall *new_call;
2019 vec<tree> argarray;
2020 size_t nargs = gimple_call_num_args (id->call_stmt);
2021 size_t n;
2022
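/* NARGS starts as the number of arguments of the call being inlined;
   subtracting the callee's named parameters leaves the number of
   arguments that were passed in place of '...'. */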
2023 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2024 nargs--;
2025
2026 /* Create the new array of arguments. */
2027 n = nargs + gimple_call_num_args (call_stmt);
2028 argarray.create (n);
2029 argarray.safe_grow_cleared (n);
2030
2031 /* Copy all the arguments before '...' */
2032 memcpy (argarray.address (),
2033 gimple_call_arg_ptr (call_stmt, 0),
2034 gimple_call_num_args (call_stmt) * sizeof (tree));
2035
2036 /* Append the arguments passed in '...' */
2037 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2038 gimple_call_arg_ptr (id->call_stmt, 0)
2039 + (gimple_call_num_args (id->call_stmt) - nargs),
2040 nargs * sizeof (tree));
2041
2042 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2043 argarray);
2044
2045 argarray.release ();
2046
2047 /* Copy all GIMPLE_CALL flags, location and block, except
2048 GF_CALL_VA_ARG_PACK. */
2049 gimple_call_copy_flags (new_call, call_stmt);
2050 gimple_call_set_va_arg_pack (new_call, false);
2051 /* location includes block. */
2052 gimple_set_location (new_call, gimple_location (stmt));
2053 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2054
2055 gsi_replace (&copy_gsi, new_call, false);
2056 stmt = new_call;
2057 }
2058 else if (call_stmt
2059 && id->call_stmt
2060 && (decl = gimple_call_fndecl (stmt))
2061 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2062 {
2063 /* __builtin_va_arg_pack_len () should be replaced by
2064 the number of anonymous arguments. */
2065 size_t nargs = gimple_call_num_args (id->call_stmt);
2066 tree count, p;
2067 gimple *new_stmt;
2068
2069 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2070 nargs--;
2071
2072 if (!gimple_call_lhs (stmt))
2073 {
2074 /* Drop unused calls. */
2075 gsi_remove (&copy_gsi, false);
2076 continue;
2077 }
2078 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2079 {
2080 count = build_int_cst (integer_type_node, nargs);
2081 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2082 gsi_replace (&copy_gsi, new_stmt, false);
2083 stmt = new_stmt;
2084 }
2085 else if (nargs != 0)
2086 {
2087 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2088 count = build_int_cst (integer_type_node, nargs);
2089 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2090 PLUS_EXPR, newlhs, count);
2091 gimple_call_set_lhs (stmt, newlhs);
2092 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2093 }
2094 }
2095 else if (call_stmt
2096 && id->call_stmt
2097 && gimple_call_internal_p (stmt)
2098 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2099 {
2100 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2101 gsi_remove (&copy_gsi, false);
2102 continue;
2103 }
2104
2105 /* Statements produced by inlining can be unfolded, especially
2106 when we have constant propagated some operands. We can't fold
2107 them right now for two reasons:
2108 1) folding requires SSA_NAME_DEF_STMTs to be correct
2109 2) we can't change function calls to builtins.
2110 So we just mark the statement for later folding. We mark
2111 all new statements, instead of just the statements that have
2112 changed by some nontrivial substitution, so even statements made
2113 foldable indirectly are updated. If this turns out to be
2114 expensive, copy_body can be told to watch for nontrivial
2115 changes. */
2116 if (id->statements_to_fold)
2117 id->statements_to_fold->add (stmt);
2118
2119 /* We're duplicating a CALL_EXPR. Find any corresponding
2120 callgraph edges and update or duplicate them. */
2121 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2122 {
2123 struct cgraph_edge *edge;
2124
2125 switch (id->transform_call_graph_edges)
2126 {
2127 case CB_CGE_DUPLICATE:
2128 edge = id->src_node->get_edge (orig_stmt);
2129 if (edge)
2130 {
2131 struct cgraph_edge *old_edge = edge;
2132 profile_count old_cnt = edge->count;
2133 edge = edge->clone (id->dst_node, call_stmt,
2134 gimple_uid (stmt),
2135 num, den,
2136 true);
2137
2138 /* Speculative calls consist of two edges - direct and
2139 indirect. Duplicate the whole thing and distribute
2140 frequencies accordingly. */
2141 if (edge->speculative)
2142 {
2143 struct cgraph_edge *direct, *indirect;
2144 struct ipa_ref *ref;
2145
2146 gcc_assert (!edge->indirect_unknown_callee);
2147 old_edge->speculative_call_info (direct, indirect, ref);
2148
2149 profile_count indir_cnt = indirect->count;
2150 indirect = indirect->clone (id->dst_node, call_stmt,
2151 gimple_uid (stmt),
2152 num, den,
2153 true);
2154
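/* Distribute the scaled block count between the direct and the
   indirect edge in the same proportion as their original counts. */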
2155 profile_probability prob
2156 = indir_cnt.probability_in (old_cnt + indir_cnt);
2157 indirect->count
2158 = copy_basic_block->count.apply_probability (prob);
2159 edge->count = copy_basic_block->count - indirect->count;
2160 id->dst_node->clone_reference (ref, stmt);
2161 }
2162 else
2163 edge->count = copy_basic_block->count;
2164 }
2165 break;
2166
2167 case CB_CGE_MOVE_CLONES:
2168 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2169 call_stmt);
2170 edge = id->dst_node->get_edge (stmt);
2171 break;
2172
2173 case CB_CGE_MOVE:
2174 edge = id->dst_node->get_edge (orig_stmt);
2175 if (edge)
2176 edge->set_call_stmt (call_stmt);
2177 break;
2178
2179 default:
2180 gcc_unreachable ();
2181 }
2182
2183 /* Constant propagation on arguments done during inlining
2184 may create a new direct call. Produce an edge for it. */
2185 if ((!edge
2186 || (edge->indirect_inlining_edge
2187 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2188 && id->dst_node->definition
2189 && (fn = gimple_call_fndecl (stmt)) != NULL)
2190 {
2191 struct cgraph_node *dest = cgraph_node::get_create (fn);
2192
2193 /* We have a missing edge in the callgraph. This can happen
2194 when previous inlining turned an indirect call into a
2195 direct call by constant propagating arguments, or when we are
2196 producing a dead clone (for further cloning). In all
2197 other cases we hit a bug (incorrect node sharing is the
2198 most common reason for missing edges). */
2199 gcc_assert (!dest->definition
2200 || dest->address_taken
2201 || !id->src_node->definition
2202 || !id->dst_node->definition);
2203 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2204 id->dst_node->create_edge_including_clones
2205 (dest, orig_stmt, call_stmt, bb->count,
2206 CIF_ORIGINALLY_INDIRECT_CALL);
2207 else
2208 id->dst_node->create_edge (dest, call_stmt,
2209 bb->count)->inline_failed
2210 = CIF_ORIGINALLY_INDIRECT_CALL;
2211 if (dump_file)
2212 {
2213 fprintf (dump_file, "Created new direct edge to %s\n",
2214 dest->name ());
2215 }
2216 }
2217
2218 notice_special_calls (as_a <gcall *> (stmt));
2219 }
2220
2221 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2222 id->eh_map, id->eh_lp_nr);
2223
2224 gsi_next (&copy_gsi);
2225 }
2226 while (!gsi_end_p (copy_gsi));
2227
2228 copy_gsi = gsi_last_bb (copy_basic_block);
2229 }
2230
2231 return copy_basic_block;
2232 }
2233
2234 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2235 form is quite easy, since the dominator relationship for the old basic
2236 blocks does not change.
2237
2238 There is, however, an exception where inlining might change the dominator
2239 relation across EH edges from basic blocks within the inlined function
2240 to landing pads in the function we inline into.
2241
2242 The function fills in PHI_RESULTs of such PHI nodes if they refer
2243 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
2244 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2245 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2246 set, and this means that there will be no overlapping live ranges
2247 for the underlying symbol.
2248
2249 This might change in the future if we allow redirecting of EH edges and
2250 we might then want to change the way we build the CFG pre-inlining to
2251 include all the possible edges. */
2252 static void
2253 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2254 bool can_throw, bool nonlocal_goto)
2255 {
2256 edge e;
2257 edge_iterator ei;
2258
2259 FOR_EACH_EDGE (e, ei, bb->succs)
2260 if (!e->dest->aux
2261 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2262 {
2263 gphi *phi;
2264 gphi_iterator si;
2265
2266 if (!nonlocal_goto)
2267 gcc_assert (e->flags & EDGE_EH);
2268
2269 if (!can_throw)
2270 gcc_assert (!(e->flags & EDGE_EH));
2271
2272 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2273 {
2274 edge re;
2275
2276 phi = si.phi ();
2277
2278 /* For abnormal goto/call edges the receiver can be the
2279 ENTRY_BLOCK. Do not assert this cannot happen. */
2280
2281 gcc_assert ((e->flags & EDGE_EH)
2282 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2283
2284 re = find_edge (ret_bb, e->dest);
2285 gcc_checking_assert (re);
2286 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2287 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2288
2289 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2290 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2291 }
2292 }
2293 }
2294
2295 /* Insert clobbers for automatic variables of the inlined function
2296 ID->src_fn at the start of basic block ID->eh_landing_pad_dest. */
2297
2298 static void
2299 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2300 {
2301 tree var;
2302 basic_block bb = id->eh_landing_pad_dest;
2303 live_vars_map *vars = NULL;
2304 unsigned int cnt = 0;
2305 unsigned int i;
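/* First pass: collect automatic variables of the source function whose
   copies in the destination are still non-register locals, and assign
   each one a dense index for the liveness bitmaps computed below. */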
2306 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2307 if (VAR_P (var)
2308 && !DECL_HARD_REGISTER (var)
2309 && !TREE_THIS_VOLATILE (var)
2310 && !DECL_HAS_VALUE_EXPR_P (var)
2311 && !is_gimple_reg (var)
2312 && auto_var_in_fn_p (var, id->src_fn)
2313 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2314 {
2315 tree *t = id->decl_map->get (var);
2316 if (!t)
2317 continue;
2318 tree new_var = *t;
2319 if (VAR_P (new_var)
2320 && !DECL_HARD_REGISTER (new_var)
2321 && !TREE_THIS_VOLATILE (new_var)
2322 && !DECL_HAS_VALUE_EXPR_P (new_var)
2323 && !is_gimple_reg (new_var)
2324 && auto_var_in_fn_p (new_var, id->dst_fn))
2325 {
2326 if (vars == NULL)
2327 vars = new live_vars_map;
2328 vars->put (DECL_UID (var), cnt++);
2329 }
2330 }
2331 if (vars == NULL)
2332 return;
2333
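/* Second pass: emit a clobber at the start of the landing pad for each
   collected variable that may be live in a source block whose copy
   reaches the landing pad through an EH edge. */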
2334 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2335 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2336 if (VAR_P (var))
2337 {
2338 edge e;
2339 edge_iterator ei;
2340 bool needed = false;
2341 unsigned int *v = vars->get (DECL_UID (var));
2342 if (v == NULL)
2343 continue;
2344 FOR_EACH_EDGE (e, ei, bb->preds)
2345 if ((e->flags & EDGE_EH) != 0
2346 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2347 {
2348 basic_block src_bb = (basic_block) e->src->aux;
2349
2350 if (bitmap_bit_p (&live[src_bb->index], *v))
2351 {
2352 needed = true;
2353 break;
2354 }
2355 }
2356 if (needed)
2357 {
2358 tree new_var = *id->decl_map->get (var);
2359 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2360 tree clobber = build_clobber (TREE_TYPE (new_var));
2361 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2362 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2363 }
2364 }
2365 destroy_live_vars (live);
2366 delete vars;
2367 }
2368
2369 /* Copy edges from BB into its copy constructed earlier, scale profile
2370 accordingly. Assume the aux pointers point to the copies of each BB.
2371 Return true if any debug stmts are left after a statement that must
2372 end the basic block. */
2373
2374 static bool
2375 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2376 basic_block ret_bb, basic_block abnormal_goto_dest,
2377 copy_body_data *id)
2378 {
2379 basic_block new_bb = (basic_block) bb->aux;
2380 edge_iterator ei;
2381 edge old_edge;
2382 gimple_stmt_iterator si;
2383 bool need_debug_cleanup = false;
2384
2385 /* Use the indices from the original blocks to create edges for the
2386 new ones. */
2387 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2388 if (!(old_edge->flags & EDGE_EH))
2389 {
2390 edge new_edge;
2391 int flags = old_edge->flags;
2392 location_t locus = old_edge->goto_locus;
2393
2394 /* Return edges do get a FALLTHRU flag when they get inlined. */
2395 if (old_edge->dest->index == EXIT_BLOCK
2396 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2397 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2398 flags |= EDGE_FALLTHRU;
2399
2400 new_edge
2401 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2402 new_edge->probability = old_edge->probability;
2403 if (!id->reset_location)
2404 new_edge->goto_locus = remap_location (locus, id);
2405 }
2406
2407 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2408 return false;
2409
2410 /* When doing function splitting, we must decrease the count of the return
2411 block, which was previously reachable by blocks we did not copy. */
2412 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2413 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2414 if (old_edge->src->index != ENTRY_BLOCK
2415 && !old_edge->src->aux)
2416 new_bb->count -= old_edge->count ().apply_scale (num, den);
2417
2418 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2419 {
2420 gimple *copy_stmt;
2421 bool can_throw, nonlocal_goto;
2422
2423 copy_stmt = gsi_stmt (si);
2424 if (!is_gimple_debug (copy_stmt))
2425 update_stmt (copy_stmt);
2426
2427 /* Do this before the possible split_block. */
2428 gsi_next (&si);
2429
2430 /* If this tree could throw an exception, there are two
2431 cases where we need to add abnormal edge(s): the
2432 tree wasn't in a region and there is a "current
2433 region" in the caller; or the original tree had
2434 EH edges. In both cases split the block after the tree,
2435 and add abnormal edge(s) as needed; we need both
2436 those from the callee and the caller.
2437 We check whether the copy can throw, because the const
2438 propagation can change an INDIRECT_REF which throws
2439 into a COMPONENT_REF which doesn't. If the copy
2440 can throw, the original could also throw. */
2441 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2442 nonlocal_goto
2443 = (stmt_can_make_abnormal_goto (copy_stmt)
2444 && !computed_goto_p (copy_stmt));
2445
2446 if (can_throw || nonlocal_goto)
2447 {
2448 if (!gsi_end_p (si))
2449 {
2450 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2451 gsi_next (&si);
2452 if (gsi_end_p (si))
2453 need_debug_cleanup = true;
2454 }
2455 if (!gsi_end_p (si))
2456 /* Note that bb's predecessor edges aren't necessarily
2457 right at this point; split_block doesn't care. */
2458 {
2459 edge e = split_block (new_bb, copy_stmt);
2460
2461 new_bb = e->dest;
2462 new_bb->aux = e->src->aux;
2463 si = gsi_start_bb (new_bb);
2464 }
2465 }
2466
2467 bool update_probs = false;
2468
2469 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2470 {
2471 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2472 update_probs = true;
2473 }
2474 else if (can_throw)
2475 {
2476 make_eh_edges (copy_stmt);
2477 update_probs = true;
2478 }
2479
2480 /* EH edges may not match old edges. Copy as much as possible. */
2481 if (update_probs)
2482 {
2483 edge e;
2484 edge_iterator ei;
2485 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2486
2487 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2488 if ((old_edge->flags & EDGE_EH)
2489 && (e = find_edge (copy_stmt_bb,
2490 (basic_block) old_edge->dest->aux))
2491 && (e->flags & EDGE_EH))
2492 e->probability = old_edge->probability;
2493
2494 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2495 if (e->flags & EDGE_EH)
2496 {
2497 if (!e->probability.initialized_p ())
2498 e->probability = profile_probability::never ();
2499 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2500 {
2501 if (id->eh_landing_pad_dest == NULL)
2502 id->eh_landing_pad_dest = e->dest;
2503 else
2504 gcc_assert (id->eh_landing_pad_dest == e->dest);
2505 }
2506 }
2507 }
2508
2509
2510 /* If the call we inline cannot make an abnormal goto, do not add
2511 additional abnormal edges but only retain those already present
2512 in the original function body. */
2513 if (abnormal_goto_dest == NULL)
2514 nonlocal_goto = false;
2515 if (nonlocal_goto)
2516 {
2517 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2518
2519 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2520 nonlocal_goto = false;
2521 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2522 in OpenMP regions which aren't allowed to be left abnormally.
2523 So, no need to add abnormal edge in that case. */
2524 else if (is_gimple_call (copy_stmt)
2525 && gimple_call_internal_p (copy_stmt)
2526 && (gimple_call_internal_fn (copy_stmt)
2527 == IFN_ABNORMAL_DISPATCHER)
2528 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2529 nonlocal_goto = false;
2530 else
2531 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2532 EDGE_ABNORMAL);
2533 }
2534
2535 if ((can_throw || nonlocal_goto)
2536 && gimple_in_ssa_p (cfun))
2537 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2538 can_throw, nonlocal_goto);
2539 }
2540 return need_debug_cleanup;
2541 }
2542
2543 /* Copy the PHIs. All blocks and edges are copied; some blocks
2544 were possibly split and new outgoing EH edges inserted.
2545 BB points to the block of the original function and AUX pointers link
2546 the original and newly copied blocks. */
2547
2548 static void
2549 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2550 {
2551 basic_block const new_bb = (basic_block) bb->aux;
2552 edge_iterator ei;
2553 gphi *phi;
2554 gphi_iterator si;
2555 edge new_edge;
2556 bool inserted = false;
2557
2558 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2559 {
2560 tree res, new_res;
2561 gphi *new_phi;
2562
2563 phi = si.phi ();
2564 res = PHI_RESULT (phi);
2565 new_res = res;
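/* Only PHIs for real operands are copied here; virtual operands were
   cleared in the copied statements and are recreated from scratch. */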
2566 if (!virtual_operand_p (res))
2567 {
2568 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2569 if (EDGE_COUNT (new_bb->preds) == 0)
2570 {
2571 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2572 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2573 }
2574 else
2575 {
2576 new_phi = create_phi_node (new_res, new_bb);
2577 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2578 {
2579 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2580 bb);
2581 tree arg;
2582 tree new_arg;
2583 edge_iterator ei2;
2584 location_t locus;
2585
2586 /* When doing partial cloning, we allow PHIs on the entry
2587 block as long as all the arguments are the same.
2588 Find any input edge to see which argument to copy. */
2589 if (!old_edge)
2590 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2591 if (!old_edge->src->aux)
2592 break;
2593
2594 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2595 new_arg = arg;
2596 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2597 gcc_assert (new_arg);
2598 /* With return slot optimization we can end up with
2599 non-gimple (foo *)&this->m; fix that here. */
2600 if (TREE_CODE (new_arg) != SSA_NAME
2601 && TREE_CODE (new_arg) != FUNCTION_DECL
2602 && !is_gimple_val (new_arg))
2603 {
2604 gimple_seq stmts = NULL;
2605 new_arg = force_gimple_operand (new_arg, &stmts, true,
2606 NULL);
2607 gsi_insert_seq_on_edge (new_edge, stmts);
2608 inserted = true;
2609 }
2610 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2611 if (id->reset_location)
2612 locus = input_location;
2613 else
2614 locus = remap_location (locus, id);
2615 add_phi_arg (new_phi, new_arg, new_edge, locus);
2616 }
2617 }
2618 }
2619 }
2620
2621 /* Commit the delayed edge insertions. */
2622 if (inserted)
2623 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2624 gsi_commit_one_edge_insert (new_edge, NULL);
2625 }
2626
2627
2628 /* Wrapper for remap_decl so it can be used as a callback. */
2629
2630 static tree
2631 remap_decl_1 (tree decl, void *data)
2632 {
2633 return remap_decl (decl, (copy_body_data *) data);
2634 }
2635
2636 /* Build the struct function and associated data structures for the new
2637 clone NEW_FNDECL to be built. CALLEE_FNDECL is the original. The function
2638 changes cfun to the function of NEW_FNDECL (and current_function_decl too). */
2639
2640 static void
2641 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2642 {
2643 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2644
2645 if (!DECL_ARGUMENTS (new_fndecl))
2646 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2647 if (!DECL_RESULT (new_fndecl))
2648 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2649
2650 /* Register specific tree functions. */
2651 gimple_register_cfg_hooks ();
2652
2653 /* Get clean struct function. */
2654 push_struct_function (new_fndecl);
2655
2656 /* We will rebuild these, so just sanity check that they are empty. */
2657 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2658 gcc_assert (cfun->local_decls == NULL);
2659 gcc_assert (cfun->cfg == NULL);
2660 gcc_assert (cfun->decl == new_fndecl);
2661
2662 /* Copy items we preserve during cloning. */
2663 cfun->static_chain_decl = src_cfun->static_chain_decl;
2664 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2665 cfun->function_end_locus = src_cfun->function_end_locus;
2666 cfun->curr_properties = src_cfun->curr_properties;
2667 cfun->last_verified = src_cfun->last_verified;
2668 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2669 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2670 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2671 cfun->calls_eh_return = src_cfun->calls_eh_return;
2672 cfun->stdarg = src_cfun->stdarg;
2673 cfun->after_inlining = src_cfun->after_inlining;
2674 cfun->can_throw_non_call_exceptions
2675 = src_cfun->can_throw_non_call_exceptions;
2676 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2677 cfun->returns_struct = src_cfun->returns_struct;
2678 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2679
2680 init_empty_tree_cfg ();
2681
2682 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2683
2684 profile_count num = count;
2685 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2686 profile_count::adjust_for_ipa_scaling (&num, &den);
2687
2688 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2689 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2690 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2691 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2692 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2693 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2694 if (src_cfun->eh)
2695 init_eh_for_function ();
2696
2697 if (src_cfun->gimple_df)
2698 {
2699 init_tree_ssa (cfun);
2700 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2701 if (cfun->gimple_df->in_ssa_p)
2702 init_ssa_operands (cfun);
2703 }
2704 }
2705
2706 /* Helper function for copy_cfg_body. Move debug stmts from the end
2707 of NEW_BB to the beginning of successor basic blocks when needed. If the
2708 successor has multiple predecessors, reset them; otherwise keep
2709 their value. */
2710
2711 static void
2712 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2713 {
2714 edge e;
2715 edge_iterator ei;
2716 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2717
2718 if (gsi_end_p (si)
2719 || gsi_one_before_end_p (si)
2720 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2721 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2722 return;
2723
2724 FOR_EACH_EDGE (e, ei, new_bb->succs)
2725 {
2726 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2727 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2728 while (is_gimple_debug (gsi_stmt (ssi)))
2729 {
2730 gimple *stmt = gsi_stmt (ssi);
2731 gdebug *new_stmt;
2732 tree var;
2733 tree value;
2734
2735 /* For the last edge move the debug stmts instead of copying
2736 them. */
2737 if (ei_one_before_end_p (ei))
2738 {
2739 si = ssi;
2740 gsi_prev (&ssi);
2741 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2742 {
2743 gimple_debug_bind_reset_value (stmt);
2744 gimple_set_location (stmt, UNKNOWN_LOCATION);
2745 }
2746 gsi_remove (&si, false);
2747 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2748 continue;
2749 }
2750
2751 if (gimple_debug_bind_p (stmt))
2752 {
2753 var = gimple_debug_bind_get_var (stmt);
2754 if (single_pred_p (e->dest))
2755 {
2756 value = gimple_debug_bind_get_value (stmt);
2757 value = unshare_expr (value);
2758 new_stmt = gimple_build_debug_bind (var, value, stmt);
2759 }
2760 else
2761 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2762 }
2763 else if (gimple_debug_source_bind_p (stmt))
2764 {
2765 var = gimple_debug_source_bind_get_var (stmt);
2766 value = gimple_debug_source_bind_get_value (stmt);
2767 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2768 }
2769 else if (gimple_debug_nonbind_marker_p (stmt))
2770 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2771 else
2772 gcc_unreachable ();
2773 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2774 id->debug_stmts.safe_push (new_stmt);
2775 gsi_prev (&ssi);
2776 }
2777 }
2778 }
2779
2780 /* Make a copy of the sub-loops of SRC_PARENT and place them
2781 as siblings of DEST_PARENT. */
2782
2783 static void
2784 copy_loops (copy_body_data *id,
2785 struct loop *dest_parent, struct loop *src_parent)
2786 {
2787 struct loop *src_loop = src_parent->inner;
2788 while (src_loop)
2789 {
2790 if (!id->blocks_to_copy
2791 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2792 {
2793 struct loop *dest_loop = alloc_loop ();
2794
2795 /* Assign the new loop its header and latch and associate
2796 those with the new loop. */
2797 dest_loop->header = (basic_block)src_loop->header->aux;
2798 dest_loop->header->loop_father = dest_loop;
2799 if (src_loop->latch != NULL)
2800 {
2801 dest_loop->latch = (basic_block)src_loop->latch->aux;
2802 dest_loop->latch->loop_father = dest_loop;
2803 }
2804
2805 /* Copy loop meta-data. */
2806 copy_loop_info (src_loop, dest_loop);
2807 if (dest_loop->unroll)
2808 cfun->has_unroll = true;
2809 if (dest_loop->force_vectorize)
2810 cfun->has_force_vectorize_loops = true;
2811 if (id->src_cfun->last_clique != 0)
2812 dest_loop->owned_clique
2813 = remap_dependence_clique (id,
2814 src_loop->owned_clique
2815 ? src_loop->owned_clique : 1);
2816
2817 /* Finally place it into the loop array and the loop tree. */
2818 place_new_loop (cfun, dest_loop);
2819 flow_loop_tree_node_add (dest_parent, dest_loop);
2820
2821 if (src_loop->simduid)
2822 {
2823 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2824 cfun->has_simduid_loops = true;
2825 }
2826
2827 /* Recurse. */
2828 copy_loops (id, dest_loop, src_loop);
2829 }
2830 src_loop = src_loop->next;
2831 }
2832 }
2833
2834 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2835
2836 void
2837 redirect_all_calls (copy_body_data * id, basic_block bb)
2838 {
2839 gimple_stmt_iterator si;
2840 gimple *last = last_stmt (bb);
2841 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2842 {
2843 gimple *stmt = gsi_stmt (si);
2844 if (is_gimple_call (stmt))
2845 {
2846 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2847 if (edge)
2848 {
2849 edge->redirect_call_stmt_to_callee ();
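/* If the redirected call was the last statement of the block and can
   no longer throw, remove the EH edges that have become dead. */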
2850 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2851 gimple_purge_dead_eh_edges (bb);
2852 }
2853 }
2854 }
2855 }
2856
2857 /* Make a copy of the body of FN so that it can be inserted inline in
2858 another function. Walks FN via CFG, returns new fndecl. */
2859
2860 static tree
2861 copy_cfg_body (copy_body_data * id,
2862 basic_block entry_block_map, basic_block exit_block_map,
2863 basic_block new_entry)
2864 {
2865 tree callee_fndecl = id->src_fn;
2866 /* Original cfun for the callee, doesn't change. */
2867 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2868 struct function *cfun_to_copy;
2869 basic_block bb;
2870 tree new_fndecl = NULL;
2871 bool need_debug_cleanup = false;
2872 int last;
2873 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2874 profile_count num = entry_block_map->count;
2875
2876 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2877
2878 /* Register specific tree functions. */
2879 gimple_register_cfg_hooks ();
2880
2881 /* If we are inlining just a region of the function, make sure to connect
2882 the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since the new entry can be
2883 part of a loop, we must compute the frequency and probability of
2884 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2885 probabilities of edges incoming from the nonduplicated region. */
2886 if (new_entry)
2887 {
2888 edge e;
2889 edge_iterator ei;
2890 den = profile_count::zero ();
2891
2892 FOR_EACH_EDGE (e, ei, new_entry->preds)
2893 if (!e->src->aux)
2894 den += e->count ();
2895 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2896 }
2897
2898 profile_count::adjust_for_ipa_scaling (&num, &den);
2899
2900 /* Must have a CFG here at this point. */
2901 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2902 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2903
2904
2905 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2906 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2907 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2908 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2909
2910 /* Duplicate any exception-handling regions. */
2911 if (cfun->eh)
2912 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2913 remap_decl_1, id);
2914
2915 /* Use aux pointers to map the original blocks to copy. */
2916 FOR_EACH_BB_FN (bb, cfun_to_copy)
2917 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2918 {
2919 basic_block new_bb = copy_bb (id, bb, num, den);
2920 bb->aux = new_bb;
2921 new_bb->aux = bb;
2922 new_bb->loop_father = entry_block_map->loop_father;
2923 }
2924
2925 last = last_basic_block_for_fn (cfun);
2926
2927 /* Now that we've duplicated the blocks, duplicate their edges. */
2928 basic_block abnormal_goto_dest = NULL;
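/* If the call being inlined can make an abnormal goto and is the last
   statement of its block, remember the caller's abnormal dispatcher so
   the copied blocks can be wired to it. */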
2929 if (id->call_stmt
2930 && stmt_can_make_abnormal_goto (id->call_stmt))
2931 {
2932 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2933
2934 bb = gimple_bb (id->call_stmt);
2935 gsi_next (&gsi);
2936 if (gsi_end_p (gsi))
2937 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2938 }
2939 FOR_ALL_BB_FN (bb, cfun_to_copy)
2940 if (!id->blocks_to_copy
2941 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2942 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2943 abnormal_goto_dest, id);
2944
2945 if (id->eh_landing_pad_dest)
2946 {
2947 add_clobbers_to_eh_landing_pad (id);
2948 id->eh_landing_pad_dest = NULL;
2949 }
2950
2951 if (new_entry)
2952 {
2953 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2954 EDGE_FALLTHRU);
2955 e->probability = profile_probability::always ();
2956 }
2957
2958 /* Duplicate the loop tree, if available and wanted. */
2959 if (loops_for_fn (src_cfun) != NULL
2960 && current_loops != NULL)
2961 {
2962 copy_loops (id, entry_block_map->loop_father,
2963 get_loop (src_cfun, 0));
2964 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2965 loops_state_set (LOOPS_NEED_FIXUP);
2966 }
2967
2968 /* If the loop tree in the source function needed fixup, mark the
2969 destination loop tree for fixup, too. */
2970 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2971 loops_state_set (LOOPS_NEED_FIXUP);
2972
2973 if (gimple_in_ssa_p (cfun))
2974 FOR_ALL_BB_FN (bb, cfun_to_copy)
2975 if (!id->blocks_to_copy
2976 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2977 copy_phis_for_bb (bb, id);
2978
2979 FOR_ALL_BB_FN (bb, cfun_to_copy)
2980 if (bb->aux)
2981 {
2982 if (need_debug_cleanup
2983 && bb->index != ENTRY_BLOCK
2984 && bb->index != EXIT_BLOCK)
2985 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2986 /* Update call edge destinations. This cannot be done before loop
2987 info is updated, because we may split basic blocks. */
2988 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2989 && bb->index != ENTRY_BLOCK
2990 && bb->index != EXIT_BLOCK)
2991 redirect_all_calls (id, (basic_block)bb->aux);
2992 ((basic_block)bb->aux)->aux = NULL;
2993 bb->aux = NULL;
2994 }
2995
2996 /* Zero out AUX fields of the blocks newly created during EH edge
2997 insertion. */
2998 for (; last < last_basic_block_for_fn (cfun); last++)
2999 {
3000 if (need_debug_cleanup)
3001 maybe_move_debug_stmts_to_successors (id,
3002 BASIC_BLOCK_FOR_FN (cfun, last));
3003 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3004 /* Update call edge destinations. This cannot be done before loop
3005 info is updated, because we may split basic blocks. */
3006 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3007 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3008 }
3009 entry_block_map->aux = NULL;
3010 exit_block_map->aux = NULL;
3011
3012 if (id->eh_map)
3013 {
3014 delete id->eh_map;
3015 id->eh_map = NULL;
3016 }
3017 if (id->dependence_map)
3018 {
3019 delete id->dependence_map;
3020 id->dependence_map = NULL;
3021 }
3022
3023 return new_fndecl;
3024 }
3025
3026 /* Copy the debug STMT using ID. We deal with these statements in a
3027 special way: if any variable in their VALUE expression wasn't
3028 remapped yet, we won't remap it, because that would get decl uids
3029 out of sync, causing codegen differences between -g and -g0. If
3030 this arises, we drop the VALUE expression altogether. */
3031
3032 static void
3033 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3034 {
3035 tree t, *n;
3036 struct walk_stmt_info wi;
3037
3038 if (tree block = gimple_block (stmt))
3039 {
3040 n = id->decl_map->get (block);
3041 gimple_set_block (stmt, n ? *n : id->block);
3042 }
3043
3044 if (gimple_debug_nonbind_marker_p (stmt))
3045 return;
3046
3047 /* Remap all the operands in COPY. */
3048 memset (&wi, 0, sizeof (wi));
3049 wi.info = id;
3050
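/* While processing_debug_stmt is set, the remapping callbacks leave
   not-yet-remapped decls alone and flag them by setting it to -1, in
   which case the value is dropped below. */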
3051 processing_debug_stmt = 1;
3052
3053 if (gimple_debug_source_bind_p (stmt))
3054 t = gimple_debug_source_bind_get_var (stmt);
3055 else if (gimple_debug_bind_p (stmt))
3056 t = gimple_debug_bind_get_var (stmt);
3057 else
3058 gcc_unreachable ();
3059
3060 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3061 && (n = id->debug_map->get (t)))
3062 {
3063 gcc_assert (VAR_P (*n));
3064 t = *n;
3065 }
3066 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3067 /* T is a non-localized variable. */;
3068 else
3069 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3070
3071 if (gimple_debug_bind_p (stmt))
3072 {
3073 gimple_debug_bind_set_var (stmt, t);
3074
3075 if (gimple_debug_bind_has_value_p (stmt))
3076 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3077 remap_gimple_op_r, &wi, NULL);
3078
3079 /* Punt if any decl couldn't be remapped. */
3080 if (processing_debug_stmt < 0)
3081 gimple_debug_bind_reset_value (stmt);
3082 }
3083 else if (gimple_debug_source_bind_p (stmt))
3084 {
3085 gimple_debug_source_bind_set_var (stmt, t);
3086 /* When inlining and the source bind refers to one of the optimized
3087 away parameters, change the source bind into a normal debug bind
3088 referring to the corresponding DEBUG_EXPR_DECL that should have
3089 been bound before the call stmt. */
3090 t = gimple_debug_source_bind_get_value (stmt);
3091 if (t != NULL_TREE
3092 && TREE_CODE (t) == PARM_DECL
3093 && id->call_stmt)
3094 {
3095 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3096 unsigned int i;
3097 if (debug_args != NULL)
3098 {
3099 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3100 if ((**debug_args)[i] == DECL_ORIGIN (t)
3101 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3102 {
3103 t = (**debug_args)[i + 1];
3104 stmt->subcode = GIMPLE_DEBUG_BIND;
3105 gimple_debug_bind_set_value (stmt, t);
3106 break;
3107 }
3108 }
3109 }
3110 if (gimple_debug_source_bind_p (stmt))
3111 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3112 remap_gimple_op_r, &wi, NULL);
3113 }
3114
3115 processing_debug_stmt = 0;
3116
3117 update_stmt (stmt);
3118 }
3119
3120 /* Process deferred debug stmts. In order to give values better odds
3121 of being successfully remapped, we delay the processing of debug
3122 stmts until all other stmts that might require remapping are
3123 processed. */
3124
3125 static void
3126 copy_debug_stmts (copy_body_data *id)
3127 {
3128 size_t i;
3129 gdebug *stmt;
3130
3131 if (!id->debug_stmts.exists ())
3132 return;
3133
3134 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3135 copy_debug_stmt (stmt, id);
3136
3137 id->debug_stmts.release ();
3138 }
3139
3140 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3141 another function. */
3142
3143 static tree
3144 copy_tree_body (copy_body_data *id)
3145 {
3146 tree fndecl = id->src_fn;
3147 tree body = DECL_SAVED_TREE (fndecl);
3148
3149 walk_tree (&body, copy_tree_body_r, id, NULL);
3150
3151 return body;
3152 }
3153
3154 /* Make a copy of the body of FN so that it can be inserted inline in
3155 another function. */
3156
3157 static tree
3158 copy_body (copy_body_data *id,
3159 basic_block entry_block_map, basic_block exit_block_map,
3160 basic_block new_entry)
3161 {
3162 tree fndecl = id->src_fn;
3163 tree body;
3164
3165 /* If this body has a CFG, walk CFG and copy. */
3166 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3167 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3168 new_entry);
3169 copy_debug_stmts (id);
3170
3171 return body;
3172 }
3173
3174 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3175 defined in function FN, or of a data member thereof. */
3176
3177 static bool
3178 self_inlining_addr_expr (tree value, tree fn)
3179 {
3180 tree var;
3181
3182 if (TREE_CODE (value) != ADDR_EXPR)
3183 return false;
3184
3185 var = get_base_address (TREE_OPERAND (value, 0));
3186
3187 return var && auto_var_in_fn_p (var, fn);
3188 }
3189
3190 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3191 lexical block and line number information from base_stmt, if given,
3192 or from the last stmt of the block otherwise. */
3193
3194 static gimple *
3195 insert_init_debug_bind (copy_body_data *id,
3196 basic_block bb, tree var, tree value,
3197 gimple *base_stmt)
3198 {
3199 gimple *note;
3200 gimple_stmt_iterator gsi;
3201 tree tracked_var;
3202
3203 if (!gimple_in_ssa_p (id->src_cfun))
3204 return NULL;
3205
3206 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3207 return NULL;
3208
3209 tracked_var = target_for_debug_bind (var);
3210 if (!tracked_var)
3211 return NULL;
3212
3213 if (bb)
3214 {
3215 gsi = gsi_last_bb (bb);
3216 if (!base_stmt && !gsi_end_p (gsi))
3217 base_stmt = gsi_stmt (gsi);
3218 }
3219
3220 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3221
3222 if (bb)
3223 {
3224 if (!gsi_end_p (gsi))
3225 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3226 else
3227 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3228 }
3229
3230 return note;
3231 }
3232
3233 static void
3234 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3235 {
3236 /* If VAR represents a zero-sized variable, it's possible that the
3237 assignment statement may result in no gimple statements. */
3238 if (init_stmt)
3239 {
3240 gimple_stmt_iterator si = gsi_last_bb (bb);
3241
3242 /* We can end up with init statements that store to a non-register
3243 from a rhs with a conversion. Handle that here by forcing the
3244 rhs into a temporary. gimple_regimplify_operands is not
3245 prepared to do this for us. */
3246 if (!is_gimple_debug (init_stmt)
3247 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3248 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3249 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3250 {
3251 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3252 gimple_expr_type (init_stmt),
3253 gimple_assign_rhs1 (init_stmt));
3254 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3255 GSI_NEW_STMT);
3256 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3257 gimple_assign_set_rhs1 (init_stmt, rhs);
3258 }
3259 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3260 gimple_regimplify_operands (init_stmt, &si);
3261
3262 if (!is_gimple_debug (init_stmt))
3263 {
3264 tree def = gimple_assign_lhs (init_stmt);
3265 insert_init_debug_bind (id, bb, def, def, init_stmt);
3266 }
3267 }
3268 }
3269
3270 /* Initialize parameter P with VALUE. If needed, produce an init statement
3271 at the end of BB. When BB is NULL, we return the init statement to be
3272 output later. */
3273 static gimple *
3274 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3275 basic_block bb, tree *vars)
3276 {
3277 gimple *init_stmt = NULL;
3278 tree var;
3279 tree rhs = value;
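/* DEF is the default-definition SSA_NAME of the parameter in the
   source function, if the source is in SSA form. */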
3280 tree def = (gimple_in_ssa_p (cfun)
3281 ? ssa_default_def (id->src_cfun, p) : NULL);
3282
3283 if (value
3284 && value != error_mark_node
3285 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3286 {
3287 /* If we can match up types by promotion/demotion do so. */
3288 if (fold_convertible_p (TREE_TYPE (p), value))
3289 rhs = fold_convert (TREE_TYPE (p), value);
3290 else
3291 {
3292 /* ??? For valid programs we should not end up here.
3293 Still if we end up with truly mismatched types here, fall back
3294 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3295 GIMPLE to the following passes. */
3296 if (!is_gimple_reg_type (TREE_TYPE (value))
3297 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3298 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3299 else
3300 rhs = build_zero_cst (TREE_TYPE (p));
3301 }
3302 }
3303
3304 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3305 here since the type of this decl must be visible to the calling
3306 function. */
3307 var = copy_decl_to_var (p, id);
3308
3309 /* Declare this new variable. */
3310 DECL_CHAIN (var) = *vars;
3311 *vars = var;
3312
3313 /* Make gimplifier happy about this variable. */
3314 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3315
3316 /* If the parameter is never assigned to and has no SSA_NAMEs created,
3317 we would not need to create a new variable here at all, if it
3318 weren't for debug info. Still, we can just use the argument
3319 value. */
3320 if (TREE_READONLY (p)
3321 && !TREE_ADDRESSABLE (p)
3322 && value && !TREE_SIDE_EFFECTS (value)
3323 && !def)
3324 {
3325 /* We may produce non-gimple trees by adding NOPs or introduce
3326 invalid sharing when the operand is not really constant.
3327 It is not a big deal to prohibit constant propagation here as
3328 we will constant propagate in the DOM1 pass anyway. */
3329 if (is_gimple_min_invariant (value)
3330 && useless_type_conversion_p (TREE_TYPE (p),
3331 TREE_TYPE (value))
3332 /* We have to be very careful about ADDR_EXPR. Make sure
3333 the base variable isn't a local variable of the inlined
3334 function, e.g., when doing recursive inlining, direct or
3335 mutually-recursive or whatever, which is why we don't
3336 just test whether fn == current_function_decl. */
3337 && ! self_inlining_addr_expr (value, fn))
3338 {
3339 insert_decl_map (id, p, value);
3340 insert_debug_decl_map (id, p, var);
3341 return insert_init_debug_bind (id, bb, var, value, NULL);
3342 }
3343 }
3344
3345 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3346 that way, when the PARM_DECL is encountered, it will be
3347 automatically replaced by the VAR_DECL. */
3348 insert_decl_map (id, p, var);
3349
3350 /* Even if P was TREE_READONLY, the new VAR should not be.
3351 In the original code, we would have constructed a
3352 temporary, and then the function body would have never
3353 changed the value of P. However, now, we will be
3354 constructing VAR directly. The constructor body may
3355 change its value multiple times as it is being
3356 constructed. Therefore, it must not be TREE_READONLY;
3357 the back-end assumes that a TREE_READONLY variable is
3358 assigned to only once. */
3359 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3360 TREE_READONLY (var) = 0;
3361
3362 /* If there is no setup required and we are in SSA, take the easy route
3363 replacing all SSA names representing the function parameter by the
3364 SSA name passed to the function.
3365
3366 We need to construct a map for the variable anyway as it might be used
3367 in different SSA names when the parameter is set in the function.
3368
3369 Do the replacement at -O0 for const arguments replaced by a constant.
3370 This is important for builtin_constant_p and other constructs requiring
3371 a constant argument to be visible in the inlined function body. */
3372 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3373 && (optimize
3374 || (TREE_READONLY (p)
3375 && is_gimple_min_invariant (rhs)))
3376 && (TREE_CODE (rhs) == SSA_NAME
3377 || is_gimple_min_invariant (rhs))
3378 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3379 {
3380 insert_decl_map (id, def, rhs);
3381 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3382 }
3383
3384 /* If the value of the argument is never used, don't bother initializing
3385 it. */
3386 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3387 {
3388 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3389 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3390 }
3391
3392 /* Initialize this VAR_DECL from the equivalent argument. Convert
3393 the argument to the proper type in case it was promoted. */
3394 if (value)
3395 {
3396 if (rhs == error_mark_node)
3397 {
3398 insert_decl_map (id, p, var);
3399 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3400 }
3401
3402 STRIP_USELESS_TYPE_CONVERSION (rhs);
3403
3404 /* If we are in SSA form properly remap the default definition
3405 or assign to a dummy SSA name if the parameter is unused and
3406 we are not optimizing. */
3407 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3408 {
3409 if (def)
3410 {
3411 def = remap_ssa_name (def, id);
3412 init_stmt = gimple_build_assign (def, rhs);
3413 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3414 set_ssa_default_def (cfun, var, NULL);
3415 }
3416 else if (!optimize)
3417 {
3418 def = make_ssa_name (var);
3419 init_stmt = gimple_build_assign (def, rhs);
3420 }
3421 }
3422 else
3423 init_stmt = gimple_build_assign (var, rhs);
3424
3425 if (bb && init_stmt)
3426 insert_init_stmt (id, bb, init_stmt);
3427 }
3428 return init_stmt;
3429 }
3430
3431 /* Generate code to initialize the parameters of the function at the
3432 top of the stack in ID from the GIMPLE_CALL STMT. */
3433
3434 static void
3435 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3436 tree fn, basic_block bb)
3437 {
3438 tree parms;
3439 size_t i;
3440 tree p;
3441 tree vars = NULL_TREE;
3442 tree static_chain = gimple_call_chain (stmt);
3443
3444 /* Figure out what the parameters are. */
3445 parms = DECL_ARGUMENTS (fn);
3446
3447 /* Loop through the parameter declarations, replacing each with an
3448 equivalent VAR_DECL, appropriately initialized. */
3449 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3450 {
3451 tree val;
3452 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3453 setup_one_parameter (id, p, val, fn, bb, &vars);
3454 }
3455 /* After remapping parameters remap their types. This has to be done
3456 in a second loop over all parameters to appropriately remap
3457 variable sized arrays when the size is specified in a
3458 parameter following the array. */
3459 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3460 {
3461 tree *varp = id->decl_map->get (p);
3462 if (varp && VAR_P (*varp))
3463 {
3464 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3465 ? ssa_default_def (id->src_cfun, p) : NULL);
3466 tree var = *varp;
3467 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3468 /* Also remap the default definition if it was remapped
3469 to the default definition of the parameter replacement
3470 by the parameter setup. */
3471 if (def)
3472 {
3473 tree *defp = id->decl_map->get (def);
3474 if (defp
3475 && TREE_CODE (*defp) == SSA_NAME
3476 && SSA_NAME_VAR (*defp) == var)
3477 TREE_TYPE (*defp) = TREE_TYPE (var);
3478 }
3479 }
3480 }
3481
3482 /* Initialize the static chain. */
3483 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3484 gcc_assert (fn != current_function_decl);
3485 if (p)
3486 {
3487 /* No static chain? Seems like a bug in tree-nested.c. */
3488 gcc_assert (static_chain);
3489
3490 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3491 }
3492
3493 declare_inline_vars (id->block, vars);
3494 }
3495
3496
3497 /* Declare a return variable to replace the RESULT_DECL for the
3498 function we are calling. An appropriate DECL_STMT is returned.
3499 The USE_STMT is filled to contain a use of the declaration to
3500 indicate the return value of the function.
3501
3502    RETURN_SLOT, if non-null, is the place in which to store the result.  It
3503    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
3504 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3505
3506 The return value is a (possibly null) value that holds the result
3507 as seen by the caller. */
3508
3509 static tree
3510 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3511 basic_block entry_bb)
3512 {
3513 tree callee = id->src_fn;
3514 tree result = DECL_RESULT (callee);
3515 tree callee_type = TREE_TYPE (result);
3516 tree caller_type;
3517 tree var, use;
3518
3519 /* Handle type-mismatches in the function declaration return type
3520 vs. the call expression. */
3521 if (modify_dest)
3522 caller_type = TREE_TYPE (modify_dest);
3523 else
3524 caller_type = TREE_TYPE (TREE_TYPE (callee));
3525
3526 /* We don't need to do anything for functions that don't return anything. */
3527 if (VOID_TYPE_P (callee_type))
3528 return NULL_TREE;
3529
3530 /* If there was a return slot, then the return value is the
3531 dereferenced address of that object. */
3532 if (return_slot)
3533 {
3534 /* The front end shouldn't have used both return_slot and
3535 a modify expression. */
3536 gcc_assert (!modify_dest);
3537 if (DECL_BY_REFERENCE (result))
3538 {
3539 tree return_slot_addr = build_fold_addr_expr (return_slot);
3540 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3541
3542 /* We are going to construct *&return_slot and we can't do that
3543 for variables believed to be not addressable.
3544
3545          FIXME: This check can possibly trigger, because values returned
3546          via the return slot optimization are not believed to have their
3547          address taken by alias analysis.  */
3548 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3549 var = return_slot_addr;
3550 }
3551 else
3552 {
3553 var = return_slot;
3554 gcc_assert (TREE_CODE (var) != SSA_NAME);
3555 if (TREE_ADDRESSABLE (result))
3556 mark_addressable (var);
3557 }
3558 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3559 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3560 && !DECL_GIMPLE_REG_P (result)
3561 && DECL_P (var))
3562 DECL_GIMPLE_REG_P (var) = 0;
3563 use = NULL;
3564 goto done;
3565 }
3566
3567 /* All types requiring non-trivial constructors should have been handled. */
3568 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3569
3570 /* Attempt to avoid creating a new temporary variable. */
3571 if (modify_dest
3572 && TREE_CODE (modify_dest) != SSA_NAME)
3573 {
3574 bool use_it = false;
3575
3576 /* We can't use MODIFY_DEST if there's type promotion involved. */
3577 if (!useless_type_conversion_p (callee_type, caller_type))
3578 use_it = false;
3579
3580 /* ??? If we're assigning to a variable sized type, then we must
3581 reuse the destination variable, because we've no good way to
3582 create variable sized temporaries at this point. */
3583 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3584 use_it = true;
3585
3586 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3587 reuse it as the result of the call directly. Don't do this if
3588 it would promote MODIFY_DEST to addressable. */
3589 else if (TREE_ADDRESSABLE (result))
3590 use_it = false;
3591 else
3592 {
3593 tree base_m = get_base_address (modify_dest);
3594
3595 /* If the base isn't a decl, then it's a pointer, and we don't
3596 know where that's going to go. */
3597 if (!DECL_P (base_m))
3598 use_it = false;
3599 else if (is_global_var (base_m))
3600 use_it = false;
3601 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3602 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3603 && !DECL_GIMPLE_REG_P (result)
3604 && DECL_GIMPLE_REG_P (base_m))
3605 use_it = false;
3606 else if (!TREE_ADDRESSABLE (base_m))
3607 use_it = true;
3608 }
3609
3610 if (use_it)
3611 {
3612 var = modify_dest;
3613 use = NULL;
3614 goto done;
3615 }
3616 }
3617
3618 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3619
3620 var = copy_result_decl_to_var (result, id);
3621 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3622
3623 /* Do not have the rest of GCC warn about this variable as it should
3624 not be visible to the user. */
3625 TREE_NO_WARNING (var) = 1;
3626
3627 declare_inline_vars (id->block, var);
3628
3629 /* Build the use expr. If the return type of the function was
3630 promoted, convert it back to the expected type. */
3631 use = var;
3632 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3633 {
3634 /* If we can match up types by promotion/demotion do so. */
3635 if (fold_convertible_p (caller_type, var))
3636 use = fold_convert (caller_type, var);
3637 else
3638 {
3639 /* ??? For valid programs we should not end up here.
3640 Still if we end up with truly mismatched types here, fall back
3641 to using a MEM_REF to not leak invalid GIMPLE to the following
3642 passes. */
3643 /* Prevent var from being written into SSA form. */
3644 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3645 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3646 DECL_GIMPLE_REG_P (var) = false;
3647 else if (is_gimple_reg_type (TREE_TYPE (var)))
3648 TREE_ADDRESSABLE (var) = true;
3649 use = fold_build2 (MEM_REF, caller_type,
3650 build_fold_addr_expr (var),
3651 build_int_cst (ptr_type_node, 0));
3652 }
3653 }
3654
3655 STRIP_USELESS_TYPE_CONVERSION (use);
3656
3657 if (DECL_BY_REFERENCE (result))
3658 {
3659 TREE_ADDRESSABLE (var) = 1;
3660 var = build_fold_addr_expr (var);
3661 }
3662
3663 done:
3664 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3665 way, when the RESULT_DECL is encountered, it will be
3666 automatically replaced by the VAR_DECL.
3667
3668 When returning by reference, ensure that RESULT_DECL remaps to
3669 gimple_val. */
3670 if (DECL_BY_REFERENCE (result)
3671 && !is_gimple_val (var))
3672 {
3673 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3674 insert_decl_map (id, result, temp);
3675 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3676          its default_def SSA_NAME.  */
3677 if (gimple_in_ssa_p (id->src_cfun)
3678 && is_gimple_reg (result))
3679 {
3680 temp = make_ssa_name (temp);
3681 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3682 }
3683 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3684 }
3685 else
3686 insert_decl_map (id, result, var);
3687
3688 /* Remember this so we can ignore it in remap_decls. */
3689 id->retvar = var;
3690 return use;
3691 }
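
/* For instance (illustrative only): for a call "y = f ();" MODIFY_DEST is
   "y" and can be reused as the return variable when the types agree and
   "y" would not be promoted to addressable; for a call using the return
   slot optimization, RETURN_SLOT is the caller-provided object and the
   callee's RESULT_DECL is mapped to it (or to its address when the result
   is returned by reference).  */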
3692
3693 /* Determine if the function can be copied. If so return NULL. If
3694    not return a string describing the reason for failure.  */
3695
3696 const char *
3697 copy_forbidden (struct function *fun)
3698 {
3699 const char *reason = fun->cannot_be_copied_reason;
3700
3701 /* Only examine the function once. */
3702 if (fun->cannot_be_copied_set)
3703 return reason;
3704
3705 /* We cannot copy a function that receives a non-local goto
3706 because we cannot remap the destination label used in the
3707 function that is performing the non-local goto. */
3708 /* ??? Actually, this should be possible, if we work at it.
3709 No doubt there's just a handful of places that simply
3710 assume it doesn't happen and don't substitute properly. */
3711 if (fun->has_nonlocal_label)
3712 {
3713 reason = G_("function %q+F can never be copied "
3714 "because it receives a non-local goto");
3715 goto fail;
3716 }
3717
3718 if (fun->has_forced_label_in_static)
3719 {
3720 reason = G_("function %q+F can never be copied because it saves "
3721 "address of local label in a static variable");
3722 goto fail;
3723 }
3724
3725 fail:
3726 fun->cannot_be_copied_reason = reason;
3727 fun->cannot_be_copied_set = true;
3728 return reason;
3729 }
3730
3731
3732 static const char *inline_forbidden_reason;
3733
3734 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3735 iff a function cannot be inlined. Also sets the reason why. */
3736
3737 static tree
3738 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3739 struct walk_stmt_info *wip)
3740 {
3741 tree fn = (tree) wip->info;
3742 tree t;
3743 gimple *stmt = gsi_stmt (*gsi);
3744
3745 switch (gimple_code (stmt))
3746 {
3747 case GIMPLE_CALL:
3748      /* Refuse to inline an alloca call unless the user explicitly forced it, as
3749          this may change the program's memory overhead drastically when the
3750          function using alloca is called in a loop.  In the GCC version present in
3751          SPEC2000, inlining into schedule_block caused it to require 2GB of
3752          RAM instead of 256MB.  Don't do so for alloca calls emitted for
3753          VLA objects as those can't cause unbounded growth (they're always
3754          wrapped inside stack_save/stack_restore regions).  */
3755 if (gimple_maybe_alloca_call_p (stmt)
3756 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3757 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3758 {
3759 inline_forbidden_reason
3760 = G_("function %q+F can never be inlined because it uses "
3761 "alloca (override using the always_inline attribute)");
3762 *handled_ops_p = true;
3763 return fn;
3764 }
3765
3766 t = gimple_call_fndecl (stmt);
3767 if (t == NULL_TREE)
3768 break;
3769
3770 /* We cannot inline functions that call setjmp. */
3771 if (setjmp_call_p (t))
3772 {
3773 inline_forbidden_reason
3774 = G_("function %q+F can never be inlined because it uses setjmp");
3775 *handled_ops_p = true;
3776 return t;
3777 }
3778
3779 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3780 switch (DECL_FUNCTION_CODE (t))
3781 {
3782 /* We cannot inline functions that take a variable number of
3783 arguments. */
3784 case BUILT_IN_VA_START:
3785 case BUILT_IN_NEXT_ARG:
3786 case BUILT_IN_VA_END:
3787 inline_forbidden_reason
3788 = G_("function %q+F can never be inlined because it "
3789 "uses variable argument lists");
3790 *handled_ops_p = true;
3791 return t;
3792
3793 case BUILT_IN_LONGJMP:
3794 /* We can't inline functions that call __builtin_longjmp at
3795 all. The non-local goto machinery really requires the
3796 destination be in a different function. If we allow the
3797 function calling __builtin_longjmp to be inlined into the
3798 function calling __builtin_setjmp, Things will Go Awry. */
3799 inline_forbidden_reason
3800 = G_("function %q+F can never be inlined because "
3801 "it uses setjmp-longjmp exception handling");
3802 *handled_ops_p = true;
3803 return t;
3804
3805 case BUILT_IN_NONLOCAL_GOTO:
3806 /* Similarly. */
3807 inline_forbidden_reason
3808 = G_("function %q+F can never be inlined because "
3809 "it uses non-local goto");
3810 *handled_ops_p = true;
3811 return t;
3812
3813 case BUILT_IN_RETURN:
3814 case BUILT_IN_APPLY_ARGS:
3815 /* If a __builtin_apply_args caller would be inlined,
3816 it would be saving arguments of the function it has
3817             been inlined into.  Similarly, __builtin_return would
3818             return from the function it has been inlined into.  */
3819 inline_forbidden_reason
3820 = G_("function %q+F can never be inlined because "
3821 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3822 *handled_ops_p = true;
3823 return t;
3824
3825 default:
3826 break;
3827 }
3828 break;
3829
3830 case GIMPLE_GOTO:
3831 t = gimple_goto_dest (stmt);
3832
3833 /* We will not inline a function which uses computed goto. The
3834 addresses of its local labels, which may be tucked into
3835 global storage, are of course not constant across
3836 instantiations, which causes unexpected behavior. */
3837 if (TREE_CODE (t) != LABEL_DECL)
3838 {
3839 inline_forbidden_reason
3840 = G_("function %q+F can never be inlined "
3841 "because it contains a computed goto");
3842 *handled_ops_p = true;
3843 return t;
3844 }
3845 break;
3846
3847 default:
3848 break;
3849 }
3850
3851 *handled_ops_p = false;
3852 return NULL_TREE;
3853 }
3854
3855 /* Return true if FNDECL is a function that cannot be inlined into
3856 another one. */
3857
3858 static bool
3859 inline_forbidden_p (tree fndecl)
3860 {
3861 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3862 struct walk_stmt_info wi;
3863 basic_block bb;
3864 bool forbidden_p = false;
3865
3866 /* First check for shared reasons not to copy the code. */
3867 inline_forbidden_reason = copy_forbidden (fun);
3868 if (inline_forbidden_reason != NULL)
3869 return true;
3870
3871 /* Next, walk the statements of the function looking for
3872      constructs we can't handle or that are non-optimal for inlining.  */
3873 hash_set<tree> visited_nodes;
3874 memset (&wi, 0, sizeof (wi));
3875 wi.info = (void *) fndecl;
3876 wi.pset = &visited_nodes;
3877
3878 FOR_EACH_BB_FN (bb, fun)
3879 {
3880 gimple *ret;
3881 gimple_seq seq = bb_seq (bb);
3882 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3883 forbidden_p = (ret != NULL);
3884 if (forbidden_p)
3885 break;
3886 }
3887
3888 return forbidden_p;
3889 }
3890 \f
3891 /* Return false if the function FNDECL cannot be inlined on account of its
3892 attributes, true otherwise. */
3893 static bool
3894 function_attribute_inlinable_p (const_tree fndecl)
3895 {
3896 if (targetm.attribute_table)
3897 {
3898 const_tree a;
3899
3900 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3901 {
3902 const_tree name = TREE_PURPOSE (a);
3903 int i;
3904
3905 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3906 if (is_attribute_p (targetm.attribute_table[i].name, name))
3907 return targetm.function_attribute_inlinable_p (fndecl);
3908 }
3909 }
3910
3911 return true;
3912 }
3913
3914 /* Returns nonzero if FN is a function that does not have any
3915 fundamental inline blocking properties. */
3916
3917 bool
3918 tree_inlinable_function_p (tree fn)
3919 {
3920 bool inlinable = true;
3921 bool do_warning;
3922 tree always_inline;
3923
3924 /* If we've already decided this function shouldn't be inlined,
3925 there's no need to check again. */
3926 if (DECL_UNINLINABLE (fn))
3927 return false;
3928
3929 /* We only warn for functions declared `inline' by the user. */
3930 do_warning = (warn_inline
3931 && DECL_DECLARED_INLINE_P (fn)
3932 && !DECL_NO_INLINE_WARNING_P (fn)
3933 && !DECL_IN_SYSTEM_HEADER (fn));
3934
3935 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3936
3937 if (flag_no_inline
3938 && always_inline == NULL)
3939 {
3940 if (do_warning)
3941 warning (OPT_Winline, "function %q+F can never be inlined because it "
3942 "is suppressed using %<-fno-inline%>", fn);
3943 inlinable = false;
3944 }
3945
3946 else if (!function_attribute_inlinable_p (fn))
3947 {
3948 if (do_warning)
3949 warning (OPT_Winline, "function %q+F can never be inlined because it "
3950 "uses attributes conflicting with inlining", fn);
3951 inlinable = false;
3952 }
3953
3954 else if (inline_forbidden_p (fn))
3955 {
3956 /* See if we should warn about uninlinable functions. Previously,
3957 some of these warnings would be issued while trying to expand
3958 the function inline, but that would cause multiple warnings
3959 about functions that would for example call alloca. But since
3960 this a property of the function, just one warning is enough.
3961 As a bonus we can now give more details about the reason why a
3962 function is not inlinable. */
3963 if (always_inline)
3964 error (inline_forbidden_reason, fn);
3965 else if (do_warning)
3966 warning (OPT_Winline, inline_forbidden_reason, fn);
3967
3968 inlinable = false;
3969 }
3970
3971 /* Squirrel away the result so that we don't have to check again. */
3972 DECL_UNINLINABLE (fn) = !inlinable;
3973
3974 return inlinable;
3975 }
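
/* Example (illustrative, assuming -Winline): a function such as

     inline int f (int n) { char *p = (char *) __builtin_alloca (n); return p[0]; }

   is reported as never inlinable because of the alloca call, unless it is
   marked always_inline; see inline_forbidden_p_stmt above for the full list
   of blocking constructs.  */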
3976
3977 /* Estimate the cost of a memory move of type TYPE.  Use the machine-dependent
3978    word size, take a possible memcpy call into account, and return the
3979    cost based on whether we optimize for size or speed according to SPEED_P.  */
3980
3981 int
3982 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3983 {
3984 HOST_WIDE_INT size;
3985
3986 gcc_assert (!VOID_TYPE_P (type));
3987
3988 if (TREE_CODE (type) == VECTOR_TYPE)
3989 {
3990 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3991 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3992 int orig_mode_size
3993 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3994 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3995 return ((orig_mode_size + simd_mode_size - 1)
3996 / simd_mode_size);
3997 }
3998
3999 size = int_size_in_bytes (type);
4000
4001 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4002 /* Cost of a memcpy call, 3 arguments and the call. */
4003 return 4;
4004 else
4005 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4006 }
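
/* Worked example (illustrative; assumes MOVE_MAX_PIECES == 8 and
   MOVE_RATIO (speed_p) == 4 on the target): a 24-byte aggregate costs
   (24 + 8 - 1) / 8 == 3 piecewise moves, whereas a 40-byte aggregate
   exceeds 8 * 4 == 32 bytes and is charged the flat memcpy cost of 4.  */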
4007
4008 /* Returns the cost of operation CODE, according to WEIGHTS.  */
4009
4010 static int
4011 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4012 tree op1 ATTRIBUTE_UNUSED, tree op2)
4013 {
4014 switch (code)
4015 {
4016 /* These are "free" conversions, or their presumed cost
4017 is folded into other operations. */
4018 case RANGE_EXPR:
4019 CASE_CONVERT:
4020 case COMPLEX_EXPR:
4021 case PAREN_EXPR:
4022 case VIEW_CONVERT_EXPR:
4023 return 0;
4024
4025 /* Assign cost of 1 to usual operations.
4026 ??? We may consider mapping RTL costs to this. */
4027 case COND_EXPR:
4028 case VEC_COND_EXPR:
4029 case VEC_PERM_EXPR:
4030
4031 case PLUS_EXPR:
4032 case POINTER_PLUS_EXPR:
4033 case POINTER_DIFF_EXPR:
4034 case MINUS_EXPR:
4035 case MULT_EXPR:
4036 case MULT_HIGHPART_EXPR:
4037
4038 case ADDR_SPACE_CONVERT_EXPR:
4039 case FIXED_CONVERT_EXPR:
4040 case FIX_TRUNC_EXPR:
4041
4042 case NEGATE_EXPR:
4043 case FLOAT_EXPR:
4044 case MIN_EXPR:
4045 case MAX_EXPR:
4046 case ABS_EXPR:
4047 case ABSU_EXPR:
4048
4049 case LSHIFT_EXPR:
4050 case RSHIFT_EXPR:
4051 case LROTATE_EXPR:
4052 case RROTATE_EXPR:
4053
4054 case BIT_IOR_EXPR:
4055 case BIT_XOR_EXPR:
4056 case BIT_AND_EXPR:
4057 case BIT_NOT_EXPR:
4058
4059 case TRUTH_ANDIF_EXPR:
4060 case TRUTH_ORIF_EXPR:
4061 case TRUTH_AND_EXPR:
4062 case TRUTH_OR_EXPR:
4063 case TRUTH_XOR_EXPR:
4064 case TRUTH_NOT_EXPR:
4065
4066 case LT_EXPR:
4067 case LE_EXPR:
4068 case GT_EXPR:
4069 case GE_EXPR:
4070 case EQ_EXPR:
4071 case NE_EXPR:
4072 case ORDERED_EXPR:
4073 case UNORDERED_EXPR:
4074
4075 case UNLT_EXPR:
4076 case UNLE_EXPR:
4077 case UNGT_EXPR:
4078 case UNGE_EXPR:
4079 case UNEQ_EXPR:
4080 case LTGT_EXPR:
4081
4082 case CONJ_EXPR:
4083
4084 case PREDECREMENT_EXPR:
4085 case PREINCREMENT_EXPR:
4086 case POSTDECREMENT_EXPR:
4087 case POSTINCREMENT_EXPR:
4088
4089 case REALIGN_LOAD_EXPR:
4090
4091 case WIDEN_SUM_EXPR:
4092 case WIDEN_MULT_EXPR:
4093 case DOT_PROD_EXPR:
4094 case SAD_EXPR:
4095 case WIDEN_MULT_PLUS_EXPR:
4096 case WIDEN_MULT_MINUS_EXPR:
4097 case WIDEN_LSHIFT_EXPR:
4098
4099 case VEC_WIDEN_MULT_HI_EXPR:
4100 case VEC_WIDEN_MULT_LO_EXPR:
4101 case VEC_WIDEN_MULT_EVEN_EXPR:
4102 case VEC_WIDEN_MULT_ODD_EXPR:
4103 case VEC_UNPACK_HI_EXPR:
4104 case VEC_UNPACK_LO_EXPR:
4105 case VEC_UNPACK_FLOAT_HI_EXPR:
4106 case VEC_UNPACK_FLOAT_LO_EXPR:
4107 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4108 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4109 case VEC_PACK_TRUNC_EXPR:
4110 case VEC_PACK_SAT_EXPR:
4111 case VEC_PACK_FIX_TRUNC_EXPR:
4112 case VEC_PACK_FLOAT_EXPR:
4113 case VEC_WIDEN_LSHIFT_HI_EXPR:
4114 case VEC_WIDEN_LSHIFT_LO_EXPR:
4115 case VEC_DUPLICATE_EXPR:
4116 case VEC_SERIES_EXPR:
4117
4118 return 1;
4119
4120 /* Few special cases of expensive operations. This is useful
4121 to avoid inlining on functions having too many of these. */
4122 case TRUNC_DIV_EXPR:
4123 case CEIL_DIV_EXPR:
4124 case FLOOR_DIV_EXPR:
4125 case ROUND_DIV_EXPR:
4126 case EXACT_DIV_EXPR:
4127 case TRUNC_MOD_EXPR:
4128 case CEIL_MOD_EXPR:
4129 case FLOOR_MOD_EXPR:
4130 case ROUND_MOD_EXPR:
4131 case RDIV_EXPR:
4132 if (TREE_CODE (op2) != INTEGER_CST)
4133 return weights->div_mod_cost;
4134 return 1;
4135
4136 /* Bit-field insertion needs several shift and mask operations. */
4137 case BIT_INSERT_EXPR:
4138 return 3;
4139
4140 default:
4141 /* We expect a copy assignment with no operator. */
4142 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4143 return 0;
4144 }
4145 }
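
/* Reading the table above on a couple of examples (illustrative):
   "a = b + c" contributes 1, "a = b / c" contributes WEIGHTS->div_mod_cost
   because the divisor is not an INTEGER_CST, and "a = b / 16" contributes
   only 1 since division by a constant is expected to be strength-reduced.  */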
4146
4147
4148 /* Estimate number of instructions that will be created by expanding
4149 the statements in the statement sequence STMTS.
4150 WEIGHTS contains weights attributed to various constructs. */
4151
4152 int
4153 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4154 {
4155 int cost;
4156 gimple_stmt_iterator gsi;
4157
4158 cost = 0;
4159 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4160 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4161
4162 return cost;
4163 }
4164
4165
4166 /* Estimate number of instructions that will be created by expanding STMT.
4167 WEIGHTS contains weights attributed to various constructs. */
4168
4169 int
4170 estimate_num_insns (gimple *stmt, eni_weights *weights)
4171 {
4172 unsigned cost, i;
4173 enum gimple_code code = gimple_code (stmt);
4174 tree lhs;
4175 tree rhs;
4176
4177 switch (code)
4178 {
4179 case GIMPLE_ASSIGN:
4180       /* Try to estimate the cost of assignments.  We have two cases to
4181          deal with:
4182 1) Simple assignments to registers;
4183 2) Stores to things that must live in memory. This includes
4184 "normal" stores to scalars, but also assignments of large
4185 structures, or constructors of big arrays;
4186
4187          Let us look at these two cases, assuming we have "a = b + C":
4188 <GIMPLE_ASSIGN <var_decl "a">
4189 <plus_expr <var_decl "b"> <constant C>>
4190 If "a" is a GIMPLE register, the assignment to it is free on almost
4191 any target, because "a" usually ends up in a real register. Hence
4192 the only cost of this expression comes from the PLUS_EXPR, and we
4193 can ignore the GIMPLE_ASSIGN.
4194 If "a" is not a GIMPLE register, the assignment to "a" will most
4195 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4196 of moving something into "a", which we compute using the function
4197 estimate_move_cost. */
4198 if (gimple_clobber_p (stmt))
4199 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4200
4201 lhs = gimple_assign_lhs (stmt);
4202 rhs = gimple_assign_rhs1 (stmt);
4203
4204 cost = 0;
4205
4206 /* Account for the cost of moving to / from memory. */
4207 if (gimple_store_p (stmt))
4208 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4209 if (gimple_assign_load_p (stmt))
4210 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4211
4212 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4213 gimple_assign_rhs1 (stmt),
4214 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4215 == GIMPLE_BINARY_RHS
4216 ? gimple_assign_rhs2 (stmt) : NULL);
4217 break;
4218
4219 case GIMPLE_COND:
4220 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4221 gimple_op (stmt, 0),
4222 gimple_op (stmt, 1));
4223 break;
4224
4225 case GIMPLE_SWITCH:
4226 {
4227 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4228 /* Take into account cost of the switch + guess 2 conditional jumps for
4229 each case label.
4230
4231            TODO: once the switch expansion logic is sufficiently separated, we can
4232            do a better job of estimating the cost of the switch.  */
4233 if (weights->time_based)
4234 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4235 else
4236 cost = gimple_switch_num_labels (switch_stmt) * 2;
4237 }
4238 break;
4239
4240 case GIMPLE_CALL:
4241 {
4242 tree decl;
4243
4244 if (gimple_call_internal_p (stmt))
4245 return 0;
4246 else if ((decl = gimple_call_fndecl (stmt))
4247 && fndecl_built_in_p (decl))
4248 {
4249 /* Do not special case builtins where we see the body.
4250                This just confuses the inliner.  */
4251 struct cgraph_node *node;
4252 if (!(node = cgraph_node::get (decl))
4253 || node->definition)
4254 ;
4255             /* For builtins that are likely expanded to nothing or
4256                inlined, do not account for operand costs.  */
4257 else if (is_simple_builtin (decl))
4258 return 0;
4259 else if (is_inexpensive_builtin (decl))
4260 return weights->target_builtin_call_cost;
4261 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4262 {
4263 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4264 specialize the cheap expansion we do here.
4265 ??? This asks for a more general solution. */
4266 switch (DECL_FUNCTION_CODE (decl))
4267 {
4268 case BUILT_IN_POW:
4269 case BUILT_IN_POWF:
4270 case BUILT_IN_POWL:
4271 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4272 && (real_equal
4273 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4274 &dconst2)))
4275 return estimate_operator_cost
4276 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4277 gimple_call_arg (stmt, 0));
4278 break;
4279
4280 default:
4281 break;
4282 }
4283 }
4284 }
4285
4286 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4287 if (gimple_call_lhs (stmt))
4288 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4289 weights->time_based);
4290 for (i = 0; i < gimple_call_num_args (stmt); i++)
4291 {
4292 tree arg = gimple_call_arg (stmt, i);
4293 cost += estimate_move_cost (TREE_TYPE (arg),
4294 weights->time_based);
4295 }
4296 break;
4297 }
4298
4299 case GIMPLE_RETURN:
4300 return weights->return_cost;
4301
4302 case GIMPLE_GOTO:
4303 case GIMPLE_LABEL:
4304 case GIMPLE_NOP:
4305 case GIMPLE_PHI:
4306 case GIMPLE_PREDICT:
4307 case GIMPLE_DEBUG:
4308 return 0;
4309
4310 case GIMPLE_ASM:
4311 {
4312 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4313 /* 1000 means infinity. This avoids overflows later
4314 with very long asm statements. */
4315 if (count > 1000)
4316 count = 1000;
4317 /* If this asm is asm inline, count anything as minimum size. */
4318 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4319 count = MIN (1, count);
4320 return MAX (1, count);
4321 }
4322
4323 case GIMPLE_RESX:
4324 /* This is either going to be an external function call with one
4325 argument, or two register copy statements plus a goto. */
4326 return 2;
4327
4328 case GIMPLE_EH_DISPATCH:
4329 /* ??? This is going to turn into a switch statement. Ideally
4330 we'd have a look at the eh region and estimate the number of
4331 edges involved. */
4332 return 10;
4333
4334 case GIMPLE_BIND:
4335 return estimate_num_insns_seq (
4336 gimple_bind_body (as_a <gbind *> (stmt)),
4337 weights);
4338
4339 case GIMPLE_EH_FILTER:
4340 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4341
4342 case GIMPLE_CATCH:
4343 return estimate_num_insns_seq (gimple_catch_handler (
4344 as_a <gcatch *> (stmt)),
4345 weights);
4346
4347 case GIMPLE_TRY:
4348 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4349 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4350
4351 /* OMP directives are generally very expensive. */
4352
4353 case GIMPLE_OMP_RETURN:
4354 case GIMPLE_OMP_SECTIONS_SWITCH:
4355 case GIMPLE_OMP_ATOMIC_STORE:
4356 case GIMPLE_OMP_CONTINUE:
4357 /* ...except these, which are cheap. */
4358 return 0;
4359
4360 case GIMPLE_OMP_ATOMIC_LOAD:
4361 return weights->omp_cost;
4362
4363 case GIMPLE_OMP_FOR:
4364 return (weights->omp_cost
4365 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4366 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4367
4368 case GIMPLE_OMP_PARALLEL:
4369 case GIMPLE_OMP_TASK:
4370 case GIMPLE_OMP_CRITICAL:
4371 case GIMPLE_OMP_MASTER:
4372 case GIMPLE_OMP_TASKGROUP:
4373 case GIMPLE_OMP_ORDERED:
4374 case GIMPLE_OMP_SCAN:
4375 case GIMPLE_OMP_SECTION:
4376 case GIMPLE_OMP_SECTIONS:
4377 case GIMPLE_OMP_SINGLE:
4378 case GIMPLE_OMP_TARGET:
4379 case GIMPLE_OMP_TEAMS:
4380 return (weights->omp_cost
4381 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4382
4383 case GIMPLE_TRANSACTION:
4384 return (weights->tm_cost
4385 + estimate_num_insns_seq (gimple_transaction_body (
4386 as_a <gtransaction *> (stmt)),
4387 weights));
4388
4389 default:
4390 gcc_unreachable ();
4391 }
4392
4393 return cost;
4394 }
4395
4396 /* Estimate number of instructions that will be created by expanding
4397 function FNDECL. WEIGHTS contains weights attributed to various
4398 constructs. */
4399
4400 int
4401 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4402 {
4403 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4404 gimple_stmt_iterator bsi;
4405 basic_block bb;
4406 int n = 0;
4407
4408 gcc_assert (my_function && my_function->cfg);
4409 FOR_EACH_BB_FN (bb, my_function)
4410 {
4411 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4412 n += estimate_num_insns (gsi_stmt (bsi), weights);
4413 }
4414
4415 return n;
4416 }
4417
4418
4419 /* Initializes weights used by estimate_num_insns. */
4420
4421 void
4422 init_inline_once (void)
4423 {
4424 eni_size_weights.call_cost = 1;
4425 eni_size_weights.indirect_call_cost = 3;
4426 eni_size_weights.target_builtin_call_cost = 1;
4427 eni_size_weights.div_mod_cost = 1;
4428 eni_size_weights.omp_cost = 40;
4429 eni_size_weights.tm_cost = 10;
4430 eni_size_weights.time_based = false;
4431 eni_size_weights.return_cost = 1;
4432
4433 /* Estimating time for call is difficult, since we have no idea what the
4434 called function does. In the current uses of eni_time_weights,
4435 underestimating the cost does less harm than overestimating it, so
4436 we choose a rather small value here. */
4437 eni_time_weights.call_cost = 10;
4438 eni_time_weights.indirect_call_cost = 15;
4439 eni_time_weights.target_builtin_call_cost = 1;
4440 eni_time_weights.div_mod_cost = 10;
4441 eni_time_weights.omp_cost = 40;
4442 eni_time_weights.tm_cost = 40;
4443 eni_time_weights.time_based = true;
4444 eni_time_weights.return_cost = 2;
4445 }
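
/* Illustrative use of the two weight sets (an assumption about how callers
   such as the inliner consume them, not additional semantics):

     init_inline_once ();
     int size = estimate_num_insns_fn (fndecl, &eni_size_weights);
     int time = estimate_num_insns_fn (fndecl, &eni_time_weights);

   SIZE approximates code growth and TIME approximates execution cost;
   both use the weights initialized above.  */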
4446
4447
4448 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4449
4450 static void
4451 prepend_lexical_block (tree current_block, tree new_block)
4452 {
4453 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4454 BLOCK_SUBBLOCKS (current_block) = new_block;
4455 BLOCK_SUPERCONTEXT (new_block) = current_block;
4456 }
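
/* For example (illustration only): if CURRENT_BLOCK already has subblocks
   B1 -> B2, then prepend_lexical_block (CURRENT_BLOCK, NEW_BLOCK) results
   in the chain NEW_BLOCK -> B1 -> B2 with
   BLOCK_SUPERCONTEXT (NEW_BLOCK) == CURRENT_BLOCK.  */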
4457
4458 /* Add local variables from CALLEE to CALLER. */
4459
4460 static inline void
4461 add_local_variables (struct function *callee, struct function *caller,
4462 copy_body_data *id)
4463 {
4464 tree var;
4465 unsigned ix;
4466
4467 FOR_EACH_LOCAL_DECL (callee, ix, var)
4468 if (!can_be_nonlocal (var, id))
4469 {
4470 tree new_var = remap_decl (var, id);
4471
4472 /* Remap debug-expressions. */
4473 if (VAR_P (new_var)
4474 && DECL_HAS_DEBUG_EXPR_P (var)
4475 && new_var != var)
4476 {
4477 tree tem = DECL_DEBUG_EXPR (var);
4478 bool old_regimplify = id->regimplify;
4479 id->remapping_type_depth++;
4480 walk_tree (&tem, copy_tree_body_r, id, NULL);
4481 id->remapping_type_depth--;
4482 id->regimplify = old_regimplify;
4483 SET_DECL_DEBUG_EXPR (new_var, tem);
4484 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4485 }
4486 add_local_decl (caller, new_var);
4487 }
4488 }
4489
4490 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4491 have brought in or introduced any debug stmts for SRCVAR. */
4492
4493 static inline void
4494 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4495 {
4496 tree *remappedvarp = id->decl_map->get (srcvar);
4497
4498 if (!remappedvarp)
4499 return;
4500
4501 if (!VAR_P (*remappedvarp))
4502 return;
4503
4504 if (*remappedvarp == id->retvar)
4505 return;
4506
4507 tree tvar = target_for_debug_bind (*remappedvarp);
4508 if (!tvar)
4509 return;
4510
4511 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4512 id->call_stmt);
4513 gimple_seq_add_stmt (bindings, stmt);
4514 }
4515
4516 /* For each inlined variable for which we may have debug bind stmts,
4517 add before GSI a final debug stmt resetting it, marking the end of
4518 its life, so that var-tracking knows it doesn't have to compute
4519 further locations for it. */
4520
4521 static inline void
4522 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4523 {
4524 tree var;
4525 unsigned ix;
4526 gimple_seq bindings = NULL;
4527
4528 if (!gimple_in_ssa_p (id->src_cfun))
4529 return;
4530
4531 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4532 return;
4533
4534 for (var = DECL_ARGUMENTS (id->src_fn);
4535 var; var = DECL_CHAIN (var))
4536 reset_debug_binding (id, var, &bindings);
4537
4538 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4539 reset_debug_binding (id, var, &bindings);
4540
4541 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4542 }
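
/* In GIMPLE dumps the reset shows up as a debug bind without a value,
   e.g. "# DEBUG n => NULL" for an inlined parameter "n" (illustrative dump
   syntax), which tells var-tracking that no further locations need to be
   computed for it past the inlined body.  */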
4543
4544 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4545
4546 static bool
4547 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4548 {
4549 tree use_retvar;
4550 tree fn;
4551 hash_map<tree, tree> *dst;
4552 hash_map<tree, tree> *st = NULL;
4553 tree return_slot;
4554 tree modify_dest;
4555 struct cgraph_edge *cg_edge;
4556 cgraph_inline_failed_t reason;
4557 basic_block return_block;
4558 edge e;
4559 gimple_stmt_iterator gsi, stmt_gsi;
4560 bool successfully_inlined = false;
4561 bool purge_dead_abnormal_edges;
4562 gcall *call_stmt;
4563 unsigned int prop_mask, src_properties;
4564 struct function *dst_cfun;
4565 tree simduid;
4566 use_operand_p use;
4567 gimple *simtenter_stmt = NULL;
4568 vec<tree> *simtvars_save;
4569
4570 /* The gimplifier uses input_location in too many places, such as
4571 internal_get_tmp_var (). */
4572 location_t saved_location = input_location;
4573 input_location = gimple_location (stmt);
4574
4575 /* From here on, we're only interested in CALL_EXPRs. */
4576 call_stmt = dyn_cast <gcall *> (stmt);
4577 if (!call_stmt)
4578 goto egress;
4579
4580 cg_edge = id->dst_node->get_edge (stmt);
4581 gcc_checking_assert (cg_edge);
4582 /* First, see if we can figure out what function is being called.
4583 If we cannot, then there is no hope of inlining the function. */
4584 if (cg_edge->indirect_unknown_callee)
4585 goto egress;
4586 fn = cg_edge->callee->decl;
4587 gcc_checking_assert (fn);
4588
4589 /* If FN is a declaration of a function in a nested scope that was
4590 globally declared inline, we don't set its DECL_INITIAL.
4591 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4592 C++ front-end uses it for cdtors to refer to their internal
4593 declarations, that are not real functions. Fortunately those
4594 don't have trees to be saved, so we can tell by checking their
4595 gimple_body. */
4596 if (!DECL_INITIAL (fn)
4597 && DECL_ABSTRACT_ORIGIN (fn)
4598 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4599 fn = DECL_ABSTRACT_ORIGIN (fn);
4600
4601 /* Don't try to inline functions that are not well-suited to inlining. */
4602 if (cg_edge->inline_failed)
4603 {
4604 reason = cg_edge->inline_failed;
4605 /* If this call was originally indirect, we do not want to emit any
4606 inlining related warnings or sorry messages because there are no
4607 guarantees regarding those. */
4608 if (cg_edge->indirect_inlining_edge)
4609 goto egress;
4610
4611 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4612           /* For extern inline functions that get redefined we have always
4613              silently ignored the always_inline flag.  Better behavior would
4614 be to be able to keep both bodies and use extern inline body
4615 for inlining, but we can't do that because frontends overwrite
4616 the body. */
4617 && !cg_edge->callee->local.redefined_extern_inline
4618 /* During early inline pass, report only when optimization is
4619 not turned on. */
4620 && (symtab->global_info_ready
4621 || !optimize
4622 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4623 /* PR 20090218-1_0.c. Body can be provided by another module. */
4624 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4625 {
4626 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4627 cgraph_inline_failed_string (reason));
4628 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4629 inform (gimple_location (stmt), "called from here");
4630 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4631 inform (DECL_SOURCE_LOCATION (cfun->decl),
4632 "called from this function");
4633 }
4634 else if (warn_inline
4635 && DECL_DECLARED_INLINE_P (fn)
4636 && !DECL_NO_INLINE_WARNING_P (fn)
4637 && !DECL_IN_SYSTEM_HEADER (fn)
4638 && reason != CIF_UNSPECIFIED
4639 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4640 /* Do not warn about not inlined recursive calls. */
4641 && !cg_edge->recursive_p ()
4642 /* Avoid warnings during early inline pass. */
4643 && symtab->global_info_ready)
4644 {
4645 auto_diagnostic_group d;
4646 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4647 fn, _(cgraph_inline_failed_string (reason))))
4648 {
4649 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4650 inform (gimple_location (stmt), "called from here");
4651 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4652 inform (DECL_SOURCE_LOCATION (cfun->decl),
4653 "called from this function");
4654 }
4655 }
4656 goto egress;
4657 }
4658 id->src_node = cg_edge->callee;
4659
4660 /* If callee is thunk, all we need is to adjust the THIS pointer
4661 and redirect to function being thunked. */
4662 if (id->src_node->thunk.thunk_p)
4663 {
4664 cgraph_edge *edge;
4665 tree virtual_offset = NULL;
4666 profile_count count = cg_edge->count;
4667 tree op;
4668 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4669
4670 cg_edge->remove ();
4671 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4672 gimple_uid (stmt),
4673 profile_count::one (),
4674 profile_count::one (),
4675 true);
4676 edge->count = count;
4677 if (id->src_node->thunk.virtual_offset_p)
4678 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4679 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4680 NULL);
4681 gsi_insert_before (&iter, gimple_build_assign (op,
4682 gimple_call_arg (stmt, 0)),
4683 GSI_NEW_STMT);
4684 gcc_assert (id->src_node->thunk.this_adjusting);
4685 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4686 virtual_offset, id->src_node->thunk.indirect_offset);
4687
4688 gimple_call_set_arg (stmt, 0, op);
4689 gimple_call_set_fndecl (stmt, edge->callee->decl);
4690 update_stmt (stmt);
4691 id->src_node->remove ();
4692 expand_call_inline (bb, stmt, id);
4693 maybe_remove_unused_call_args (cfun, stmt);
4694 return true;
4695 }
4696 fn = cg_edge->callee->decl;
4697 cg_edge->callee->get_untransformed_body ();
4698
4699 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4700 cg_edge->callee->verify ();
4701
4702 /* We will be inlining this callee. */
4703 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4704
4705 /* Update the callers EH personality. */
4706 if (DECL_FUNCTION_PERSONALITY (fn))
4707 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4708 = DECL_FUNCTION_PERSONALITY (fn);
4709
4710 /* Split the block before the GIMPLE_CALL. */
4711 stmt_gsi = gsi_for_stmt (stmt);
4712 gsi_prev (&stmt_gsi);
4713 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4714 bb = e->src;
4715 return_block = e->dest;
4716 remove_edge (e);
4717
4718 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4719 been the source of abnormal edges. In this case, schedule
4720 the removal of dead abnormal edges. */
4721 gsi = gsi_start_bb (return_block);
4722 gsi_next (&gsi);
4723 purge_dead_abnormal_edges = gsi_end_p (gsi);
4724
4725 stmt_gsi = gsi_start_bb (return_block);
4726
4727 /* Build a block containing code to initialize the arguments, the
4728 actual inline expansion of the body, and a label for the return
4729 statements within the function to jump to. The type of the
4730 statement expression is the return type of the function call.
4731 ??? If the call does not have an associated block then we will
4732 remap all callee blocks to NULL, effectively dropping most of
4733 its debug information. This should only happen for calls to
4734 artificial decls inserted by the compiler itself. We need to
4735 either link the inlined blocks into the caller block tree or
4736 not refer to them in any way to not break GC for locations. */
4737 if (tree block = gimple_block (stmt))
4738 {
4739       /* We do want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
4740          so that inlined_function_outer_scope_p returns true on this BLOCK.  */
4741 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4742 if (loc == UNKNOWN_LOCATION)
4743 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4744 if (loc == UNKNOWN_LOCATION)
4745 loc = BUILTINS_LOCATION;
4746 id->block = make_node (BLOCK);
4747 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4748 BLOCK_SOURCE_LOCATION (id->block) = loc;
4749 prepend_lexical_block (block, id->block);
4750 }
4751
4752 /* Local declarations will be replaced by their equivalents in this map. */
4753 st = id->decl_map;
4754 id->decl_map = new hash_map<tree, tree>;
4755 dst = id->debug_map;
4756 id->debug_map = NULL;
4757 if (flag_stack_reuse != SR_NONE)
4758 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4759
4760 /* Record the function we are about to inline. */
4761 id->src_fn = fn;
4762 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4763 id->reset_location = DECL_IGNORED_P (fn);
4764 id->call_stmt = call_stmt;
4765
4766 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4767 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4768 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4769 simtvars_save = id->dst_simt_vars;
4770 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4771 && (simduid = bb->loop_father->simduid) != NULL_TREE
4772 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4773 && single_imm_use (simduid, &use, &simtenter_stmt)
4774 && is_gimple_call (simtenter_stmt)
4775 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4776 vec_alloc (id->dst_simt_vars, 0);
4777 else
4778 id->dst_simt_vars = NULL;
4779
4780 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4781 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4782
4783 /* If the src function contains an IFN_VA_ARG, then so will the dst
4784 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4785 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4786 src_properties = id->src_cfun->curr_properties & prop_mask;
4787 if (src_properties != prop_mask)
4788 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4789 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4790
4791 gcc_assert (!id->src_cfun->after_inlining);
4792
4793 id->entry_bb = bb;
4794 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4795 {
4796 gimple_stmt_iterator si = gsi_last_bb (bb);
4797 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4798 NOT_TAKEN),
4799 GSI_NEW_STMT);
4800 }
4801 initialize_inlined_parameters (id, stmt, fn, bb);
4802 if (debug_nonbind_markers_p && debug_inline_points && id->block
4803 && inlined_function_outer_scope_p (id->block))
4804 {
4805 gimple_stmt_iterator si = gsi_last_bb (bb);
4806 gsi_insert_after (&si, gimple_build_debug_inline_entry
4807 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4808 GSI_NEW_STMT);
4809 }
4810
4811 if (DECL_INITIAL (fn))
4812 {
4813 if (gimple_block (stmt))
4814 {
4815 tree *var;
4816
4817 prepend_lexical_block (id->block,
4818 remap_blocks (DECL_INITIAL (fn), id));
4819 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4820 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4821 == NULL_TREE));
4822 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4823 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4824 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4825 under it. The parameters can be then evaluated in the debugger,
4826 but don't show in backtraces. */
4827 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4828 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4829 {
4830 tree v = *var;
4831 *var = TREE_CHAIN (v);
4832 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4833 BLOCK_VARS (id->block) = v;
4834 }
4835 else
4836 var = &TREE_CHAIN (*var);
4837 }
4838 else
4839 remap_blocks_to_null (DECL_INITIAL (fn), id);
4840 }
4841
4842 /* Return statements in the function body will be replaced by jumps
4843 to the RET_LABEL. */
4844 gcc_assert (DECL_INITIAL (fn));
4845 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4846
4847 /* Find the LHS to which the result of this call is assigned. */
4848 return_slot = NULL;
4849 if (gimple_call_lhs (stmt))
4850 {
4851 modify_dest = gimple_call_lhs (stmt);
4852
4853 /* The function which we are inlining might not return a value,
4854 in which case we should issue a warning that the function
4855 does not return a value. In that case the optimizers will
4856 see that the variable to which the value is assigned was not
4857 initialized. We do not want to issue a warning about that
4858 uninitialized variable. */
4859 if (DECL_P (modify_dest))
4860 TREE_NO_WARNING (modify_dest) = 1;
4861
4862 if (gimple_call_return_slot_opt_p (call_stmt))
4863 {
4864 return_slot = modify_dest;
4865 modify_dest = NULL;
4866 }
4867 }
4868 else
4869 modify_dest = NULL;
4870
4871 /* If we are inlining a call to the C++ operator new, we don't want
4872 to use type based alias analysis on the return value. Otherwise
4873 we may get confused if the compiler sees that the inlined new
4874 function returns a pointer which was just deleted. See bug
4875 33407. */
4876 if (DECL_IS_OPERATOR_NEW (fn))
4877 {
4878 return_slot = NULL;
4879 modify_dest = NULL;
4880 }
4881
4882 /* Declare the return variable for the function. */
4883 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4884
4885 /* Add local vars in this inlined callee to caller. */
4886 add_local_variables (id->src_cfun, cfun, id);
4887
4888 if (dump_enabled_p ())
4889 {
4890 char buf[128];
4891 snprintf (buf, sizeof(buf), "%4.2f",
4892 cg_edge->sreal_frequency ().to_double ());
4893 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4894 call_stmt,
4895 "Inlining %C to %C with frequency %s\n",
4896 id->src_node, id->dst_node, buf);
4897 if (dump_file && (dump_flags & TDF_DETAILS))
4898 {
4899 id->src_node->dump (dump_file);
4900 id->dst_node->dump (dump_file);
4901 }
4902 }
4903
4904 /* This is it. Duplicate the callee body. Assume callee is
4905 pre-gimplified. Note that we must not alter the caller
4906 function in any way before this point, as this CALL_EXPR may be
4907 a self-referential call; if we're calling ourselves, we need to
4908 duplicate our body before altering anything. */
4909 copy_body (id, bb, return_block, NULL);
4910
4911 reset_debug_bindings (id, stmt_gsi);
4912
4913 if (flag_stack_reuse != SR_NONE)
4914 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4915 if (!TREE_THIS_VOLATILE (p))
4916 {
4917 tree *varp = id->decl_map->get (p);
4918 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4919 {
4920 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4921 gimple *clobber_stmt;
4922 TREE_THIS_VOLATILE (clobber) = 1;
4923 clobber_stmt = gimple_build_assign (*varp, clobber);
4924 gimple_set_location (clobber_stmt, gimple_location (stmt));
4925 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4926 }
4927 }
4928
4929 /* Reset the escaped solution. */
4930 if (cfun->gimple_df)
4931 pt_solution_reset (&cfun->gimple_df->escaped);
4932
4933 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4934 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4935 {
4936 size_t nargs = gimple_call_num_args (simtenter_stmt);
4937 vec<tree> *vars = id->dst_simt_vars;
4938 auto_vec<tree> newargs (nargs + vars->length ());
4939 for (size_t i = 0; i < nargs; i++)
4940 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4941 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4942 {
4943 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4944 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4945 }
4946 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4947 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4948 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4949 gsi_replace (&gsi, g, false);
4950 }
4951 vec_free (id->dst_simt_vars);
4952 id->dst_simt_vars = simtvars_save;
4953
4954 /* Clean up. */
4955 if (id->debug_map)
4956 {
4957 delete id->debug_map;
4958 id->debug_map = dst;
4959 }
4960 delete id->decl_map;
4961 id->decl_map = st;
4962
4963   /* Unlink the call's virtual operands before replacing it.  */
4964 unlink_stmt_vdef (stmt);
4965 if (gimple_vdef (stmt)
4966 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4967 release_ssa_name (gimple_vdef (stmt));
4968
4969 /* If the inlined function returns a result that we care about,
4970 substitute the GIMPLE_CALL with an assignment of the return
4971 variable to the LHS of the call. That is, if STMT was
4972 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4973 if (use_retvar && gimple_call_lhs (stmt))
4974 {
4975 gimple *old_stmt = stmt;
4976 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4977 gimple_set_location (stmt, gimple_location (old_stmt));
4978 gsi_replace (&stmt_gsi, stmt, false);
4979 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4980 /* Append a clobber for id->retvar if easily possible. */
4981 if (flag_stack_reuse != SR_NONE
4982 && id->retvar
4983 && VAR_P (id->retvar)
4984 && id->retvar != return_slot
4985 && id->retvar != modify_dest
4986 && !TREE_THIS_VOLATILE (id->retvar)
4987 && !is_gimple_reg (id->retvar)
4988 && !stmt_ends_bb_p (stmt))
4989 {
4990 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4991 gimple *clobber_stmt;
4992 TREE_THIS_VOLATILE (clobber) = 1;
4993 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4994 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4995 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4996 }
4997 }
4998 else
4999 {
5000 /* Handle the case of inlining a function with no return
5001 statement, which causes the return value to become undefined. */
5002 if (gimple_call_lhs (stmt)
5003 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5004 {
5005 tree name = gimple_call_lhs (stmt);
5006 tree var = SSA_NAME_VAR (name);
5007 tree def = var ? ssa_default_def (cfun, var) : NULL;
5008
5009 if (def)
5010 {
5011 /* If the variable is used undefined, make this name
5012 undefined via a move. */
5013 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5014 gsi_replace (&stmt_gsi, stmt, true);
5015 }
5016 else
5017 {
5018 if (!var)
5019 {
5020 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5021 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5022 }
5023 /* Otherwise make this variable undefined. */
5024 gsi_remove (&stmt_gsi, true);
5025 set_ssa_default_def (cfun, var, name);
5026 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5027 }
5028 }
5029 /* Replace with a clobber for id->retvar. */
5030 else if (flag_stack_reuse != SR_NONE
5031 && id->retvar
5032 && VAR_P (id->retvar)
5033 && id->retvar != return_slot
5034 && id->retvar != modify_dest
5035 && !TREE_THIS_VOLATILE (id->retvar)
5036 && !is_gimple_reg (id->retvar))
5037 {
5038 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5039 gimple *clobber_stmt;
5040 TREE_THIS_VOLATILE (clobber) = 1;
5041 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5042 gimple_set_location (clobber_stmt, gimple_location (stmt));
5043 gsi_replace (&stmt_gsi, clobber_stmt, false);
5044 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5045 }
5046 else
5047 gsi_remove (&stmt_gsi, true);
5048 }
5049
5050 if (purge_dead_abnormal_edges)
5051 {
5052 gimple_purge_dead_eh_edges (return_block);
5053 gimple_purge_dead_abnormal_call_edges (return_block);
5054 }
5055
5056 /* If the value of the new expression is ignored, that's OK. We
5057 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5058 the equivalent inlined version either. */
5059 if (is_gimple_assign (stmt))
5060 {
5061 gcc_assert (gimple_assign_single_p (stmt)
5062 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5063 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5064 }
5065
5066 id->add_clobbers_to_eh_landing_pads = 0;
5067
5068 /* Output the inlining info for this abstract function, since it has been
5069 inlined. If we don't do this now, we can lose the information about the
5070 variables in the function when the blocks get blown away as soon as we
5071 remove the cgraph node. */
5072 if (gimple_block (stmt))
5073 (*debug_hooks->outlining_inline_function) (fn);
5074
5075 /* Update callgraph if needed. */
5076 cg_edge->callee->remove ();
5077
5078 id->block = NULL_TREE;
5079 id->retvar = NULL_TREE;
5080 successfully_inlined = true;
5081
5082 egress:
5083 input_location = saved_location;
5084 return successfully_inlined;
5085 }
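
/* Rough source-level picture of the transformation performed above
   (illustrative only; p_copy and retval_copy are hypothetical names for
   the remapped parameter and return variables):

     before:   a = foo (x);

     after:    p_copy = x;           <- initialize_inlined_parameters
               ... copied body of foo, with returns rewritten to
                   assignments to retval_copy ...
               a = retval_copy;      <- use returned by declare_return_variable  */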
5086
5087 /* Expand call statements reachable from STMT_P.
5088 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5089 in a MODIFY_EXPR. */
5090
5091 static bool
5092 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5093 {
5094 gimple_stmt_iterator gsi;
5095 bool inlined = false;
5096
5097 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5098 {
5099 gimple *stmt = gsi_stmt (gsi);
5100 gsi_prev (&gsi);
5101
5102 if (is_gimple_call (stmt)
5103 && !gimple_call_internal_p (stmt))
5104 inlined |= expand_call_inline (bb, stmt, id);
5105 }
5106
5107 return inlined;
5108 }
5109
5110
5111 /* Walk all basic blocks created after FIRST and try to fold every statement
5112 in the STATEMENTS pointer set. */
5113
5114 static void
5115 fold_marked_statements (int first, hash_set<gimple *> *statements)
5116 {
5117 for (; first < last_basic_block_for_fn (cfun); first++)
5118 if (BASIC_BLOCK_FOR_FN (cfun, first))
5119 {
5120 gimple_stmt_iterator gsi;
5121
5122 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5123 !gsi_end_p (gsi);
5124 gsi_next (&gsi))
5125 if (statements->contains (gsi_stmt (gsi)))
5126 {
5127 gimple *old_stmt = gsi_stmt (gsi);
5128 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5129
5130 if (old_decl && fndecl_built_in_p (old_decl))
5131 {
5132              /* Folding builtins can create multiple instructions;
5133                 we need to look at all of them.  */
5134 gimple_stmt_iterator i2 = gsi;
5135 gsi_prev (&i2);
5136 if (fold_stmt (&gsi))
5137 {
5138 gimple *new_stmt;
5139 /* If a builtin at the end of a bb folded into nothing,
5140 the following loop won't work. */
5141 if (gsi_end_p (gsi))
5142 {
5143 cgraph_update_edges_for_call_stmt (old_stmt,
5144 old_decl, NULL);
5145 break;
5146 }
5147 if (gsi_end_p (i2))
5148 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5149 else
5150 gsi_next (&i2);
5151 while (1)
5152 {
5153 new_stmt = gsi_stmt (i2);
5154 update_stmt (new_stmt);
5155 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5156 new_stmt);
5157
5158 if (new_stmt == gsi_stmt (gsi))
5159 {
5160                      /* It is okay to check only for the very last
5161                         of these statements.  If it is a throwing
5162                         statement nothing will change.  If it isn't,
5163                         this can remove EH edges.  The only problematic
5164                         case would be some intermediate stmts throwing
5165                         while the last one does not; that would mean
5166                         we'd have to split the block, which we can't
5167                         do here and would lose anyway.  And as builtins
5168                         probably never throw, this all
5169                         is moot anyway.  */
5170 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5171 new_stmt))
5172 gimple_purge_dead_eh_edges (
5173 BASIC_BLOCK_FOR_FN (cfun, first));
5174 break;
5175 }
5176 gsi_next (&i2);
5177 }
5178 }
5179 }
5180 else if (fold_stmt (&gsi))
5181 {
5182 /* Re-read the statement from GSI as fold_stmt() may
5183 have changed it. */
5184 gimple *new_stmt = gsi_stmt (gsi);
5185 update_stmt (new_stmt);
5186
5187 if (is_gimple_call (old_stmt)
5188 || is_gimple_call (new_stmt))
5189 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5190 new_stmt);
5191
5192 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5193 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5194 first));
5195 }
5196 }
5197 }
5198 }
5199
5200 /* Expand calls to inline functions in the body of FN. */
5201
5202 unsigned int
5203 optimize_inline_calls (tree fn)
5204 {
5205 copy_body_data id;
5206 basic_block bb;
5207 int last = n_basic_blocks_for_fn (cfun);
5208 bool inlined_p = false;
5209
5210 /* Clear out ID. */
5211 memset (&id, 0, sizeof (id));
5212
5213 id.src_node = id.dst_node = cgraph_node::get (fn);
5214 gcc_assert (id.dst_node->definition);
5215 id.dst_fn = fn;
5216 /* Or any functions that aren't finished yet. */
5217 if (current_function_decl)
5218 id.dst_fn = current_function_decl;
5219
5220 id.copy_decl = copy_decl_maybe_to_var;
5221 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5222 id.transform_new_cfg = false;
5223 id.transform_return_to_modify = true;
5224 id.transform_parameter = true;
5225 id.transform_lang_insert_block = NULL;
5226 id.statements_to_fold = new hash_set<gimple *>;
5227
5228 push_gimplify_context ();
5229
5230 /* We make no attempts to keep dominance info up-to-date. */
5231 free_dominance_info (CDI_DOMINATORS);
5232 free_dominance_info (CDI_POST_DOMINATORS);
5233
5234 /* Register specific gimple functions. */
5235 gimple_register_cfg_hooks ();
5236
5237 /* Reach the trees by walking over the CFG, and note the
5238 enclosing basic-blocks in the call edges. */
5239 /* We walk the blocks going forward, because inlined function bodies
5240 will split id->current_basic_block, and the new blocks will
5241 follow it; we'll trudge through them, processing their CALL_EXPRs
5242 along the way. */
5243 FOR_EACH_BB_FN (bb, cfun)
5244 inlined_p |= gimple_expand_calls_inline (bb, &id);
5245
5246 pop_gimplify_context (NULL);
5247
5248 if (flag_checking)
5249 {
5250 struct cgraph_edge *e;
5251
5252 id.dst_node->verify ();
5253
5254 /* Double check that we inlined everything we are supposed to inline. */
5255 for (e = id.dst_node->callees; e; e = e->next_callee)
5256 gcc_assert (e->inline_failed);
5257 }
5258
5259 /* Fold queued statements. */
5260 update_max_bb_count ();
5261 fold_marked_statements (last, id.statements_to_fold);
5262 delete id.statements_to_fold;
5263
5264 gcc_assert (!id.debug_stmts.exists ());
5265
5266 /* If we didn't inline into the function there is nothing to do. */
5267 if (!inlined_p)
5268 return 0;
5269
5270 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5271 number_blocks (fn);
5272
5273 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5274
5275 if (flag_checking)
5276 id.dst_node->verify ();
5277
5278 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5279 not possible yet - the IPA passes might make various functions not
5280 throw and they don't care to proactively update local EH info. This is
5281 done later in the fixup_cfg pass, which also executes the verification. */
5282 return (TODO_update_ssa
5283 | TODO_cleanup_cfg
5284 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5285 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5286 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5287 ? TODO_rebuild_frequencies : 0));
5288 }
5289
5290 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5291
5292 tree
5293 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5294 {
5295 enum tree_code code = TREE_CODE (*tp);
5296 enum tree_code_class cl = TREE_CODE_CLASS (code);
5297
5298 /* We make copies of most nodes. */
5299 if (IS_EXPR_CODE_CLASS (cl)
5300 || code == TREE_LIST
5301 || code == TREE_VEC
5302 || code == TYPE_DECL
5303 || code == OMP_CLAUSE)
5304 {
5305 /* Because the chain gets clobbered when we make a copy, we save it
5306 here. */
5307 tree chain = NULL_TREE, new_tree;
5308
5309 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5310 chain = TREE_CHAIN (*tp);
5311
5312 /* Copy the node. */
5313 new_tree = copy_node (*tp);
5314
5315 *tp = new_tree;
5316
5317 /* Now, restore the chain, if appropriate. That will cause
5318 walk_tree to walk into the chain as well. */
5319 if (code == PARM_DECL
5320 || code == TREE_LIST
5321 || code == OMP_CLAUSE)
5322 TREE_CHAIN (*tp) = chain;
5323
5324 /* For now, we don't update BLOCKs when we make copies. So, we
5325 have to nullify all BIND_EXPRs. */
5326 if (TREE_CODE (*tp) == BIND_EXPR)
5327 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5328 }
5329 else if (code == CONSTRUCTOR)
5330 {
5331 /* CONSTRUCTOR nodes need special handling because
5332 we need to duplicate the vector of elements. */
5333 tree new_tree;
5334
5335 new_tree = copy_node (*tp);
5336 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5337 *tp = new_tree;
5338 }
5339 else if (code == STATEMENT_LIST)
5340 /* We used to just abort on STATEMENT_LIST, but we can run into them
5341 with statement-expressions (c++/40975). */
5342 copy_statement_list (tp);
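/* Types, declarations and constants are shared rather than copied;
don't walk into them. */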
5343 else if (TREE_CODE_CLASS (code) == tcc_type)
5344 *walk_subtrees = 0;
5345 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5346 *walk_subtrees = 0;
5347 else if (TREE_CODE_CLASS (code) == tcc_constant)
5348 *walk_subtrees = 0;
5349 return NULL_TREE;
5350 }
5351
5352 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5353 information indicating to what new SAVE_EXPR this one should be
5354 mapped, use that one. Otherwise, create a new node and enter it
5355 in ST. */
5356
5357 static void
5358 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5359 {
5360 tree *n;
5361 tree t;
5362
5363 /* See if we already encountered this SAVE_EXPR. */
5364 n = st->get (*tp);
5365
5366 /* If we didn't already remap this SAVE_EXPR, do so now. */
5367 if (!n)
5368 {
5369 t = copy_node (*tp);
5370
5371 /* Remember this SAVE_EXPR. */
5372 st->put (*tp, t);
5373 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5374 st->put (t, t);
5375 }
5376 else
5377 {
5378 /* We've already walked into this SAVE_EXPR; don't do it again. */
5379 *walk_subtrees = 0;
5380 t = *n;
5381 }
5382
5383 /* Replace this SAVE_EXPR with the copy. */
5384 *tp = t;
5385 }
5386
5387 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5388 label, copies the declaration and enters it in the decl map of DATA (which
5389 is really a 'copy_body_data *'). */
5390
5391 static tree
5392 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5393 bool *handled_ops_p ATTRIBUTE_UNUSED,
5394 struct walk_stmt_info *wi)
5395 {
5396 copy_body_data *id = (copy_body_data *) wi->info;
5397 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5398
5399 if (stmt)
5400 {
5401 tree decl = gimple_label_label (stmt);
5402
5403 /* Copy the decl and remember the copy. */
5404 insert_decl_map (id, decl, id->copy_decl (decl, id));
5405 }
5406
5407 return NULL_TREE;
5408 }
5409
5410 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5411 struct walk_stmt_info *wi);
5412
5413 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5414 Using the decl map in the copy_body_data pointed to by WI->info,
5415 remaps all local declarations to appropriate replacements in gimple
5416 operands. */
5417
5418 static tree
5419 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5420 {
5421 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5422 copy_body_data *id = (copy_body_data *) wi->info;
5423 hash_map<tree, tree> *st = id->decl_map;
5424 tree *n;
5425 tree expr = *tp;
5426
5427 /* For recursive invocations this is no longer the LHS itself. */
5428 bool is_lhs = wi->is_lhs;
5429 wi->is_lhs = false;
5430
5431 if (TREE_CODE (expr) == SSA_NAME)
5432 {
5433 *tp = remap_ssa_name (*tp, id);
5434 *walk_subtrees = 0;
5435 if (is_lhs)
5436 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5437 }
5438 /* Only a local declaration (variable or label). */
5439 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5440 || TREE_CODE (expr) == LABEL_DECL)
5441 {
5442 /* Lookup the declaration. */
5443 n = st->get (expr);
5444
5445 /* If it's there, remap it. */
5446 if (n)
5447 *tp = *n;
5448 *walk_subtrees = 0;
5449 }
5450 else if (TREE_CODE (expr) == STATEMENT_LIST
5451 || TREE_CODE (expr) == BIND_EXPR
5452 || TREE_CODE (expr) == SAVE_EXPR)
5453 gcc_unreachable ();
5454 else if (TREE_CODE (expr) == TARGET_EXPR)
5455 {
5456 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5457 It's OK for this to happen if it was part of a subtree that
5458 isn't immediately expanded, such as operand 2 of another
5459 TARGET_EXPR. */
5460 if (!TREE_OPERAND (expr, 1))
5461 {
5462 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5463 TREE_OPERAND (expr, 3) = NULL_TREE;
5464 }
5465 }
5466 else if (TREE_CODE (expr) == OMP_CLAUSE)
5467 {
5468 /* Before the omplower pass completes, some OMP clauses can contain
5469 sequences that are neither copied by gimple_seq_copy nor walked by
5470 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5471 in those situations, we have to copy and process them explicitly. */
5472
5473 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5474 {
5475 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5476 seq = duplicate_remap_omp_clause_seq (seq, wi);
5477 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5478 }
5479 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5480 {
5481 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5482 seq = duplicate_remap_omp_clause_seq (seq, wi);
5483 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5484 }
5485 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5486 {
5487 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5488 seq = duplicate_remap_omp_clause_seq (seq, wi);
5489 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5490 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5491 seq = duplicate_remap_omp_clause_seq (seq, wi);
5492 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5493 }
5494 }
5495
5496 /* Keep iterating. */
5497 return NULL_TREE;
5498 }
5499
5500
5501 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5502 Using the decl map in the copy_body_data pointed to by WI->info,
5503 remaps all local declarations to appropriate replacements in gimple
5504 statements. */
5505
5506 static tree
5507 replace_locals_stmt (gimple_stmt_iterator *gsip,
5508 bool *handled_ops_p ATTRIBUTE_UNUSED,
5509 struct walk_stmt_info *wi)
5510 {
5511 copy_body_data *id = (copy_body_data *) wi->info;
5512 gimple *gs = gsi_stmt (*gsip);
5513
5514 if (gbind *stmt = dyn_cast <gbind *> (gs))
5515 {
5516 tree block = gimple_bind_block (stmt);
5517
5518 if (block)
5519 {
5520 remap_block (&block, id);
5521 gimple_bind_set_block (stmt, block);
5522 }
5523
5524 /* This will remap a lot of the same decls again, but this should be
5525 harmless. */
5526 if (gimple_bind_vars (stmt))
5527 {
5528 tree old_var, decls = gimple_bind_vars (stmt);
5529
5530 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5531 if (!can_be_nonlocal (old_var, id)
5532 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5533 remap_decl (old_var, id);
5534
5535 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5536 id->prevent_decl_creation_for_types = true;
5537 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5538 id->prevent_decl_creation_for_types = false;
5539 }
5540 }
5541
5542 /* Keep iterating. */
5543 return NULL_TREE;
5544 }
5545
5546 /* Create a copy of SEQ and remap all decls in it. */
5547
5548 static gimple_seq
5549 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5550 {
5551 if (!seq)
5552 return NULL;
5553
5554 /* If there are any labels in OMP sequences, they can only be referred to in
5555 the sequence itself, and therefore we can both mark and remap them here. */
5556 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5557 gimple_seq copy = gimple_seq_copy (seq);
5558 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5559 return copy;
5560 }
5561
5562 /* Copies everything in SEQ and replaces variables and labels local to
5563 current_function_decl. */
5564
5565 gimple_seq
5566 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5567 {
5568 copy_body_data id;
5569 struct walk_stmt_info wi;
5570 gimple_seq copy;
5571
5572 /* There's nothing to do for an empty sequence. */
5573 if (seq == NULL)
5574 return seq;
5575
5576 /* Set up ID. */
5577 memset (&id, 0, sizeof (id));
5578 id.src_fn = current_function_decl;
5579 id.dst_fn = current_function_decl;
5580 id.src_cfun = cfun;
5581 id.decl_map = new hash_map<tree, tree>;
5582 id.debug_map = NULL;
5583
5584 id.copy_decl = copy_decl_no_change;
5585 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5586 id.transform_new_cfg = false;
5587 id.transform_return_to_modify = false;
5588 id.transform_parameter = false;
5589 id.transform_lang_insert_block = NULL;
5590
5591 /* Walk the tree once to find local labels. */
5592 memset (&wi, 0, sizeof (wi));
5593 hash_set<tree> visited;
5594 wi.info = &id;
5595 wi.pset = &visited;
5596 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5597
5598 copy = gimple_seq_copy (seq);
5599
5600 /* Walk the copy, remapping decls. */
5601 memset (&wi, 0, sizeof (wi));
5602 wi.info = &id;
5603 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5604
5605 /* Clean up. */
5606 delete id.decl_map;
5607 if (id.debug_map)
5608 delete id.debug_map;
5609 if (id.dependence_map)
5610 {
5611 delete id.dependence_map;
5612 id.dependence_map = NULL;
5613 }
5614
5615 return copy;
5616 }
5617
5618
5619 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5620
5621 static tree
5622 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5623 {
5624 if (*tp == data)
5625 return (tree) data;
5626 else
5627 return NULL;
5628 }
5629
5630 DEBUG_FUNCTION bool
5631 debug_find_tree (tree top, tree search)
5632 {
5633 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5634 }
5635
5636
5637 /* Declare the variables created by the inliner. Add all the variables in
5638 VARS to BLOCK. */
5639
5640 static void
5641 declare_inline_vars (tree block, tree vars)
5642 {
5643 tree t;
5644 for (t = vars; t; t = DECL_CHAIN (t))
5645 {
5646 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5647 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5648 add_local_decl (cfun, t);
5649 }
5650
5651 if (block)
5652 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5653 }
5654
5655 /* Finish copying DECL into COPY. The DECL originally was in
5656 ID->src_fn, but it will now live in ID->dst_fn; fix up its debug
5657 info, RTL and context accordingly. */
5658
5659 tree
5660 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5661 {
5662 /* Don't generate debug information for the copy if we wouldn't have
5663 generated it for the original either. */
5664 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5665 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5666
5667 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5668 declaration inspired this copy. */
5669 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5670
5671 /* The new variable/label has no RTL, yet. */
5672 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5673 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5674 SET_DECL_RTL (copy, 0);
5675 /* For vector typed decls make sure to update DECL_MODE according
5676 to the new function context. */
5677 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5678 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5679
5680 /* These args would always appear unused, if not for this. */
5681 TREE_USED (copy) = 1;
5682
5683 /* Set the context for the new declaration. */
5684 if (!DECL_CONTEXT (decl))
5685 /* Globals stay global. */
5686 ;
5687 else if (DECL_CONTEXT (decl) != id->src_fn)
5688 /* Things that weren't in the scope of the function we're inlining
5689 from aren't in the scope we're inlining to, either. */
5690 ;
5691 else if (TREE_STATIC (decl))
5692 /* Function-scoped static variables should stay in the original
5693 function. */
5694 ;
5695 else
5696 {
5697 /* Ordinary automatic local variables are now in the scope of the
5698 new function. */
5699 DECL_CONTEXT (copy) = id->dst_fn;
5700 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5701 {
5702 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5703 DECL_ATTRIBUTES (copy)
5704 = tree_cons (get_identifier ("omp simt private"), NULL,
5705 DECL_ATTRIBUTES (copy));
5706 id->dst_simt_vars->safe_push (copy);
5707 }
5708 }
5709
5710 return copy;
5711 }
5712
5713 static tree
5714 copy_decl_to_var (tree decl, copy_body_data *id)
5715 {
5716 tree copy, type;
5717
5718 gcc_assert (TREE_CODE (decl) == PARM_DECL
5719 || TREE_CODE (decl) == RESULT_DECL);
5720
5721 type = TREE_TYPE (decl);
5722
5723 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5724 VAR_DECL, DECL_NAME (decl), type);
5725 if (DECL_PT_UID_SET_P (decl))
5726 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5727 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5728 TREE_READONLY (copy) = TREE_READONLY (decl);
5729 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5730 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5731
5732 return copy_decl_for_dup_finish (id, decl, copy);
5733 }
5734
5735 /* Like copy_decl_to_var, but create a return slot object instead of a
5736 pointer variable for return by invisible reference. */
5737
5738 static tree
5739 copy_result_decl_to_var (tree decl, copy_body_data *id)
5740 {
5741 tree copy, type;
5742
5743 gcc_assert (TREE_CODE (decl) == PARM_DECL
5744 || TREE_CODE (decl) == RESULT_DECL);
5745
5746 type = TREE_TYPE (decl);
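/* For a result returned by invisible reference, build the return slot
with the pointed-to type rather than the pointer type. */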
5747 if (DECL_BY_REFERENCE (decl))
5748 type = TREE_TYPE (type);
5749
5750 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5751 VAR_DECL, DECL_NAME (decl), type);
5752 if (DECL_PT_UID_SET_P (decl))
5753 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5754 TREE_READONLY (copy) = TREE_READONLY (decl);
5755 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5756 if (!DECL_BY_REFERENCE (decl))
5757 {
5758 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5759 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5760 }
5761
5762 return copy_decl_for_dup_finish (id, decl, copy);
5763 }
5764
5765 tree
5766 copy_decl_no_change (tree decl, copy_body_data *id)
5767 {
5768 tree copy;
5769
5770 copy = copy_node (decl);
5771
5772 /* The COPY is not abstract; it will be generated in DST_FN. */
5773 DECL_ABSTRACT_P (copy) = false;
5774 lang_hooks.dup_lang_specific_decl (copy);
5775
5776 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5777 been taken; it's for internal bookkeeping in expand_goto_internal. */
5778 if (TREE_CODE (copy) == LABEL_DECL)
5779 {
5780 TREE_ADDRESSABLE (copy) = 0;
5781 LABEL_DECL_UID (copy) = -1;
5782 }
5783
5784 return copy_decl_for_dup_finish (id, decl, copy);
5785 }
5786
5787 static tree
5788 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5789 {
5790 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5791 return copy_decl_to_var (decl, id);
5792 else
5793 return copy_decl_no_change (decl, id);
5794 }
5795
5796 /* Return a copy of the function's argument tree. */
5797 static tree
5798 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5799 bitmap args_to_skip, tree *vars)
5800 {
5801 tree arg, *parg;
5802 tree new_parm = NULL;
5803 int i = 0;
5804
5805 parg = &new_parm;
5806
5807 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5808 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5809 {
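/* This argument is kept: remap it, falling back to a fresh copy if the
remapped node is not a PARM_DECL. */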
5810 tree new_tree = remap_decl (arg, id);
5811 if (TREE_CODE (new_tree) != PARM_DECL)
5812 new_tree = id->copy_decl (arg, id);
5813 lang_hooks.dup_lang_specific_decl (new_tree);
5814 *parg = new_tree;
5815 parg = &DECL_CHAIN (new_tree);
5816 }
5817 else if (!id->decl_map->get (arg))
5818 {
5819 /* Make an equivalent VAR_DECL. If the argument was used
5820 as a temporary variable later in the function, the uses will be
5821 replaced by the local variable. */
5822 tree var = copy_decl_to_var (arg, id);
5823 insert_decl_map (id, arg, var);
5824 /* Declare this new variable. */
5825 DECL_CHAIN (var) = *vars;
5826 *vars = var;
5827 }
5828 return new_parm;
5829 }
5830
5831 /* Return a copy of the function's static chain. */
5832 static tree
5833 copy_static_chain (tree static_chain, copy_body_data * id)
5834 {
5835 tree *chain_copy, *pvar;
5836
5837 chain_copy = &static_chain;
5838 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5839 {
5840 tree new_tree = remap_decl (*pvar, id);
5841 lang_hooks.dup_lang_specific_decl (new_tree);
5842 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5843 *pvar = new_tree;
5844 }
5845 return static_chain;
5846 }
5847
5848 /* Return true if the function is allowed to be versioned.
5849 This is a guard for the versioning functionality. */
5850
5851 bool
5852 tree_versionable_function_p (tree fndecl)
5853 {
5854 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5855 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5856 }
5857
5858 /* Update clone info after duplication. */
5859
5860 static void
5861 update_clone_info (copy_body_data * id)
5862 {
5863 struct cgraph_node *node;
5864 if (!id->dst_node->clones)
5865 return;
5866 for (node = id->dst_node->clones; node != id->dst_node;)
5867 {
5868 /* First update replace maps to match the new body. */
5869 if (node->clone.tree_map)
5870 {
5871 unsigned int i;
5872 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5873 {
5874 struct ipa_replace_map *replace_info;
5875 replace_info = (*node->clone.tree_map)[i];
5876 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5877 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5878 }
5879 }
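/* Continue the walk over the clone tree: visit children first, then
siblings, then climb back up via clone_of. */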
5880 if (node->clones)
5881 node = node->clones;
5882 else if (node->next_sibling_clone)
5883 node = node->next_sibling_clone;
5884 else
5885 {
5886 while (node != id->dst_node && !node->next_sibling_clone)
5887 node = node->clone_of;
5888 if (node != id->dst_node)
5889 node = node->next_sibling_clone;
5890 }
5891 }
5892 }
5893
5894 /* Create a copy of a function's tree.
5895 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5896 of the original function and the new copied function
5897 respectively. In case we want to replace a DECL
5898 tree with another tree while duplicating the function's
5899 body, TREE_MAP represents the mapping between these
5900 trees. If UPDATE_CLONES is set, the call_stmt fields
5901 of edges of clones of the function will be updated.
5902
5903 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5904 from the new version.
5905 If SKIP_RETURN is true, the new version will return void.
5906 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5907 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5908 */
5909 void
5910 tree_function_versioning (tree old_decl, tree new_decl,
5911 vec<ipa_replace_map *, va_gc> *tree_map,
5912 bool update_clones, bitmap args_to_skip,
5913 bool skip_return, bitmap blocks_to_copy,
5914 basic_block new_entry)
5915 {
5916 struct cgraph_node *old_version_node;
5917 struct cgraph_node *new_version_node;
5918 copy_body_data id;
5919 tree p;
5920 unsigned i;
5921 struct ipa_replace_map *replace_info;
5922 basic_block old_entry_block, bb;
5923 auto_vec<gimple *, 10> init_stmts;
5924 tree vars = NULL_TREE;
5925 bitmap debug_args_to_skip = args_to_skip;
5926
5927 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5928 && TREE_CODE (new_decl) == FUNCTION_DECL);
5929 DECL_POSSIBLY_INLINED (old_decl) = 1;
5930
5931 old_version_node = cgraph_node::get (old_decl);
5932 gcc_checking_assert (old_version_node);
5933 new_version_node = cgraph_node::get (new_decl);
5934 gcc_checking_assert (new_version_node);
5935
5936 /* Copy over debug args. */
5937 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5938 {
5939 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5940 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5941 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5942 old_debug_args = decl_debug_args_lookup (old_decl);
5943 if (old_debug_args)
5944 {
5945 new_debug_args = decl_debug_args_insert (new_decl);
5946 *new_debug_args = vec_safe_copy (*old_debug_args);
5947 }
5948 }
5949
5950 /* Output the inlining info for this abstract function, since it has been
5951 inlined. If we don't do this now, we can lose the information about the
5952 variables in the function when the blocks get blown away as soon as we
5953 remove the cgraph node. */
5954 (*debug_hooks->outlining_inline_function) (old_decl);
5955
5956 DECL_ARTIFICIAL (new_decl) = 1;
5957 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5958 if (DECL_ORIGIN (old_decl) == old_decl)
5959 old_version_node->used_as_abstract_origin = true;
5960 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5961
5962 /* Prepare the data structures for the tree copy. */
5963 memset (&id, 0, sizeof (id));
5964
5965 /* Prepare to record statements that will need folding after the copy. */
5966 id.statements_to_fold = new hash_set<gimple *>;
5967
5968 id.decl_map = new hash_map<tree, tree>;
5969 id.debug_map = NULL;
5970 id.src_fn = old_decl;
5971 id.dst_fn = new_decl;
5972 id.src_node = old_version_node;
5973 id.dst_node = new_version_node;
5974 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5975 id.blocks_to_copy = blocks_to_copy;
5976
5977 id.copy_decl = copy_decl_no_change;
5978 id.transform_call_graph_edges
5979 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5980 id.transform_new_cfg = true;
5981 id.transform_return_to_modify = false;
5982 id.transform_parameter = false;
5983 id.transform_lang_insert_block = NULL;
5984
5985 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5986 (DECL_STRUCT_FUNCTION (old_decl));
5987 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5988 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5989 initialize_cfun (new_decl, old_decl,
5990 new_entry ? new_entry->count : old_entry_block->count);
5991 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5992 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5993 = id.src_cfun->gimple_df->ipa_pta;
5994
5995 /* Copy the function's static chain. */
5996 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5997 if (p)
5998 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5999 = copy_static_chain (p, &id);
6000
6001 /* If there's a tree_map, prepare for substitution. */
6002 if (tree_map)
6003 for (i = 0; i < tree_map->length (); i++)
6004 {
6005 gimple *init;
6006 replace_info = (*tree_map)[i];
6007 if (replace_info->replace_p)
6008 {
6009 int parm_num = -1;
6010 if (!replace_info->old_tree)
6011 {
6012 int p = replace_info->parm_num;
6013 tree parm;
6014 tree req_type, new_type;
6015
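/* Locate the PARM_NUM-th parameter of the old function; that is the
parameter this replacement applies to. */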
6016 for (parm = DECL_ARGUMENTS (old_decl); p;
6017 parm = DECL_CHAIN (parm))
6018 p--;
6019 replace_info->old_tree = parm;
6020 parm_num = replace_info->parm_num;
6021 req_type = TREE_TYPE (parm);
6022 new_type = TREE_TYPE (replace_info->new_tree);
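/* The replacement value has to match the parameter's type; try a
regular or bitwise conversion, and drop the replacement if neither
is possible. */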
6023 if (!useless_type_conversion_p (req_type, new_type))
6024 {
6025 if (fold_convertible_p (req_type, replace_info->new_tree))
6026 replace_info->new_tree
6027 = fold_build1 (NOP_EXPR, req_type,
6028 replace_info->new_tree);
6029 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6030 replace_info->new_tree
6031 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6032 replace_info->new_tree);
6033 else
6034 {
6035 if (dump_file)
6036 {
6037 fprintf (dump_file, " const ");
6038 print_generic_expr (dump_file,
6039 replace_info->new_tree);
6040 fprintf (dump_file,
6041 " can't be converted to param ");
6042 print_generic_expr (dump_file, parm);
6043 fprintf (dump_file, "\n");
6044 }
6045 replace_info->old_tree = NULL;
6046 }
6047 }
6048 }
6049 else
6050 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6051 if (replace_info->old_tree)
6052 {
6053 init = setup_one_parameter (&id, replace_info->old_tree,
6054 replace_info->new_tree, id.src_fn,
6055 NULL,
6056 &vars);
6057 if (init)
6058 init_stmts.safe_push (init);
6059 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6060 {
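/* This parameter receives a replacement value, so it does not need the
optimized-away-parameter debug handling further below; clear its bit
in DEBUG_ARGS_TO_SKIP. */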
6061 if (parm_num == -1)
6062 {
6063 tree parm;
6064 int p;
6065 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6066 parm = DECL_CHAIN (parm), p++)
6067 if (parm == replace_info->old_tree)
6068 {
6069 parm_num = p;
6070 break;
6071 }
6072 }
6073 if (parm_num != -1)
6074 {
6075 if (debug_args_to_skip == args_to_skip)
6076 {
6077 debug_args_to_skip = BITMAP_ALLOC (NULL);
6078 bitmap_copy (debug_args_to_skip, args_to_skip);
6079 }
6080 bitmap_clear_bit (debug_args_to_skip, parm_num);
6081 }
6082 }
6083 }
6084 }
6085 }
6086 /* Copy the function's arguments. */
6087 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6088 DECL_ARGUMENTS (new_decl)
6089 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6090 args_to_skip, &vars);
6091
6092 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6093 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6094
6095 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6096
6097 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6098 /* Add local vars. */
6099 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6100
6101 if (DECL_RESULT (old_decl) == NULL_TREE)
6102 ;
6103 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6104 {
6105 DECL_RESULT (new_decl)
6106 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6107 RESULT_DECL, NULL_TREE, void_type_node);
6108 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6109 cfun->returns_struct = 0;
6110 cfun->returns_pcc_struct = 0;
6111 }
6112 else
6113 {
6114 tree old_name;
6115 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6116 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6117 if (gimple_in_ssa_p (id.src_cfun)
6118 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6119 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6120 {
6121 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6122 insert_decl_map (&id, old_name, new_name);
6123 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6124 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6125 }
6126 }
6127
6128 /* Set up the destination function's loop tree. */
6129 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6130 {
6131 cfun->curr_properties &= ~PROP_loops;
6132 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6133 cfun->curr_properties |= PROP_loops;
6134 }
6135
6136 /* Copy the function's body. */
6137 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6138 new_entry);
6139
6140 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6141 number_blocks (new_decl);
6142
6143 /* We want to create the BB unconditionally, so that the addition of
6144 debug stmts doesn't affect BB count, which may in the end cause
6145 codegen differences. */
6146 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6147 while (init_stmts.length ())
6148 insert_init_stmt (&id, bb, init_stmts.pop ());
6149 update_clone_info (&id);
6150
6151 /* Remap the nonlocal_goto_save_area, if any. */
6152 if (cfun->nonlocal_goto_save_area)
6153 {
6154 struct walk_stmt_info wi;
6155
6156 memset (&wi, 0, sizeof (wi));
6157 wi.info = &id;
6158 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6159 }
6160
6161 /* Clean up. */
6162 delete id.decl_map;
6163 if (id.debug_map)
6164 delete id.debug_map;
6165 free_dominance_info (CDI_DOMINATORS);
6166 free_dominance_info (CDI_POST_DOMINATORS);
6167
6168 update_max_bb_count ();
6169 fold_marked_statements (0, id.statements_to_fold);
6170 delete id.statements_to_fold;
6171 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6172 if (id.dst_node->definition)
6173 cgraph_edge::rebuild_references ();
6174 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6175 {
6176 calculate_dominance_info (CDI_DOMINATORS);
6177 fix_loop_structure (NULL);
6178 }
6179 update_ssa (TODO_update_ssa);
6180
6181 /* After partial cloning we need to rescale frequencies, so that they are
6182 within the proper range in the cloned function. */
6183 if (new_entry)
6184 {
6185 struct cgraph_edge *e;
6186 rebuild_frequencies ();
6187
6188 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6189 for (e = new_version_node->callees; e; e = e->next_callee)
6190 {
6191 basic_block bb = gimple_bb (e->call_stmt);
6192 e->count = bb->count;
6193 }
6194 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6195 {
6196 basic_block bb = gimple_bb (e->call_stmt);
6197 e->count = bb->count;
6198 }
6199 }
6200
6201 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6202 {
6203 tree parm;
6204 vec<tree, va_gc> **debug_args = NULL;
6205 unsigned int len = 0;
6206 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6207 parm; parm = DECL_CHAIN (parm), i++)
6208 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6209 {
6210 tree ddecl;
6211
6212 if (debug_args == NULL)
6213 {
6214 debug_args = decl_debug_args_insert (new_decl);
6215 len = vec_safe_length (*debug_args);
6216 }
6217 ddecl = make_node (DEBUG_EXPR_DECL);
6218 DECL_ARTIFICIAL (ddecl) = 1;
6219 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6220 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6221 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6222 vec_safe_push (*debug_args, ddecl);
6223 }
6224 if (debug_args != NULL)
6225 {
6226 /* On the callee side, add
6227 DEBUG D#Y s=> parm
6228 DEBUG var => D#Y
6229 stmts to the first bb where var is a VAR_DECL created for the
6230 optimized away parameter in DECL_INITIAL block. This hints
6231 in the debug info that var (whose DECL_ORIGIN is the parm
6232 PARM_DECL) is optimized away, but could be looked up at the
6233 call site as value of D#X there. */
6234 tree var = vars, vexpr;
6235 gimple_stmt_iterator cgsi
6236 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6237 gimple *def_temp;
6238 var = vars;
6239 i = vec_safe_length (*debug_args);
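/* Walk the (origin, DEBUG_EXPR_DECL) pairs just added, from last to
first, and emit the debug bind and source bind stmts for each VAR_DECL
created for a removed parameter. */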
6240 do
6241 {
6242 i -= 2;
6243 while (var != NULL_TREE
6244 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6245 var = TREE_CHAIN (var);
6246 if (var == NULL_TREE)
6247 break;
6248 vexpr = make_node (DEBUG_EXPR_DECL);
6249 parm = (**debug_args)[i];
6250 DECL_ARTIFICIAL (vexpr) = 1;
6251 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6252 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6253 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6254 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6255 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6256 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6257 }
6258 while (i > len);
6259 }
6260 }
6261
6262 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6263 BITMAP_FREE (debug_args_to_skip);
6264 free_dominance_info (CDI_DOMINATORS);
6265 free_dominance_info (CDI_POST_DOMINATORS);
6266
6267 gcc_assert (!id.debug_stmts.exists ());
6268 pop_cfun ();
6269 return;
6270 }
6271
6272 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
6273 the callee and return the inlined body on success. */
6274
6275 tree
6276 maybe_inline_call_in_expr (tree exp)
6277 {
6278 tree fn = get_callee_fndecl (exp);
6279
6280 /* We can only try to inline "const" functions. */
6281 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6282 {
6283 call_expr_arg_iterator iter;
6284 copy_body_data id;
6285 tree param, arg, t;
6286 hash_map<tree, tree> decl_map;
6287
6288 /* Remap the parameters. */
6289 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6290 param;
6291 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6292 decl_map.put (param, arg);
6293
6294 memset (&id, 0, sizeof (id));
6295 id.src_fn = fn;
6296 id.dst_fn = current_function_decl;
6297 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6298 id.decl_map = &decl_map;
6299
6300 id.copy_decl = copy_decl_no_change;
6301 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6302 id.transform_new_cfg = false;
6303 id.transform_return_to_modify = true;
6304 id.transform_parameter = true;
6305 id.transform_lang_insert_block = NULL;
6306
6307 /* Make sure not to unshare trees behind the front-end's back
6308 since front-end specific mechanisms may rely on sharing. */
6309 id.regimplify = false;
6310 id.do_not_unshare = true;
6311
6312 /* We're not inside any EH region. */
6313 id.eh_lp_nr = 0;
6314
6315 t = copy_tree_body (&id);
6316
6317 /* We can only return something suitable for use in a GENERIC
6318 expression tree. */
6319 if (TREE_CODE (t) == MODIFY_EXPR)
6320 return TREE_OPERAND (t, 1);
6321 }
6322
6323 return NULL_TREE;
6324 }
6325
6326 /* Duplicate a type, fields and all. */
6327
6328 tree
6329 build_duplicate_type (tree type)
6330 {
6331 struct copy_body_data id;
6332
6333 memset (&id, 0, sizeof (id));
6334 id.src_fn = current_function_decl;
6335 id.dst_fn = current_function_decl;
6336 id.src_cfun = cfun;
6337 id.decl_map = new hash_map<tree, tree>;
6338 id.debug_map = NULL;
6339 id.copy_decl = copy_decl_no_change;
6340
6341 type = remap_type_1 (type, &id);
6342
6343 delete id.decl_map;
6344 if (id.debug_map)
6345 delete id.debug_map;
6346
6347 TYPE_CANONICAL (type) = type;
6348
6349 return type;
6350 }
6351
6352 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6353 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6354 evaluation. */
6355
6356 tree
6357 copy_fn (tree fn, tree& parms, tree& result)
6358 {
6359 copy_body_data id;
6360 tree param;
6361 hash_map<tree, tree> decl_map;
6362
6363 tree *p = &parms;
6364 *p = NULL_TREE;
6365
6366 memset (&id, 0, sizeof (id));
6367 id.src_fn = fn;
6368 id.dst_fn = current_function_decl;
6369 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6370 id.decl_map = &decl_map;
6371
6372 id.copy_decl = copy_decl_no_change;
6373 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6374 id.transform_new_cfg = false;
6375 id.transform_return_to_modify = false;
6376 id.transform_parameter = true;
6377 id.transform_lang_insert_block = NULL;
6378
6379 /* Make sure not to unshare trees behind the front-end's back
6380 since front-end specific mechanisms may rely on sharing. */
6381 id.regimplify = false;
6382 id.do_not_unshare = true;
6383 id.do_not_fold = true;
6384
6385 /* We're not inside any EH region. */
6386 id.eh_lp_nr = 0;
6387
6388 /* Remap the parameters and result and return them to the caller. */
6389 for (param = DECL_ARGUMENTS (fn);
6390 param;
6391 param = DECL_CHAIN (param))
6392 {
6393 *p = remap_decl (param, &id);
6394 p = &DECL_CHAIN (*p);
6395 }
6396
6397 if (DECL_RESULT (fn))
6398 result = remap_decl (DECL_RESULT (fn), &id);
6399 else
6400 result = NULL_TREE;
6401
6402 return copy_tree_body (&id);
6403 }