]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-inline.c
re PR c++/59813 (tail-call elimination didn't fire for left-shift of char to cout)
[thirdparty/gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
65
66 /* I'm not real happy about this, but we need to handle gimple and
67 non-gimple trees. */
68
69 /* Inlining, Cloning, Versioning, Parallelization
70
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76 statements and RESX statements are adjusted accordingly.
77
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
82
83 Versioning: a function body is duplicated and the result is a new
84 function rather than into blocks of an existing function as with
85 inlining. Some parameters will become constants.
86
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
90
91 All of these will simultaneously lookup any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
98
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
100
101 /* To Do:
102
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
109
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
112
113
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
116
117 eni_weights eni_size_weights;
118
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
121
122 eni_weights eni_time_weights;
123
124 /* Prototypes. */
125
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_decl_to_var (tree, copy_body_data *);
134 static tree copy_result_decl_to_var (tree, copy_body_data *);
135 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
136 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138
139 /* Insert a tree->tree mapping for ID. Despite the name suggests
140 that the trees should be variables, it is used for more than that. */
141
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145 id->decl_map->put (key, value);
146
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 id->decl_map->put (value, value);
151 }
152
153 /* Insert a tree->tree mapping for ID. This is only used for
154 variables. */
155
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159 if (!gimple_in_ssa_p (id->src_cfun))
160 return;
161
162 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163 return;
164
165 if (!target_for_debug_bind (key))
166 return;
167
168 gcc_assert (TREE_CODE (key) == PARM_DECL);
169 gcc_assert (VAR_P (value));
170
171 if (!id->debug_map)
172 id->debug_map = new hash_map<tree, tree>;
173
174 id->debug_map->put (key, value);
175 }
176
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
180 context. */
181 static int processing_debug_stmt = 0;
182
183 /* Construct new SSA name for old NAME. ID is the inline context. */
184
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188 tree new_tree, var;
189 tree *n;
190
191 gcc_assert (TREE_CODE (name) == SSA_NAME);
192
193 n = id->decl_map->get (name);
194 if (n)
195 return unshare_expr (*n);
196
197 if (processing_debug_stmt)
198 {
199 if (SSA_NAME_IS_DEFAULT_DEF (name)
200 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 && id->entry_bb == NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 {
204 tree vexpr = make_node (DEBUG_EXPR_DECL);
205 gimple *def_temp;
206 gimple_stmt_iterator gsi;
207 tree val = SSA_NAME_VAR (name);
208
209 n = id->decl_map->get (val);
210 if (n != NULL)
211 val = *n;
212 if (TREE_CODE (val) != PARM_DECL
213 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
214 {
215 processing_debug_stmt = -1;
216 return name;
217 }
218 n = id->decl_map->get (val);
219 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
220 return *n;
221 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
222 DECL_ARTIFICIAL (vexpr) = 1;
223 TREE_TYPE (vexpr) = TREE_TYPE (name);
224 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
225 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
226 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
227 insert_decl_map (id, val, vexpr);
228 return vexpr;
229 }
230
231 processing_debug_stmt = -1;
232 return name;
233 }
234
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var = SSA_NAME_VAR (name);
237 if (!var
238 || (!SSA_NAME_IS_DEFAULT_DEF (name)
239 && VAR_P (var)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
241 && DECL_ARTIFICIAL (var)
242 && DECL_IGNORED_P (var)
243 && !DECL_NAME (var)))
244 {
245 struct ptr_info_def *pi;
246 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
247 if (!var && SSA_NAME_IDENTIFIER (name))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
249 insert_decl_map (id, name, new_tree);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id->src_cfun->gimple_df
254 && id->src_cfun->gimple_df->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name))
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
258 {
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
261 }
262 return new_tree;
263 }
264
265 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
266 in copy_bb. */
267 new_tree = remap_decl (var, id);
268
269 /* We might've substituted constant or another SSA_NAME for
270 the variable.
271
272 Replace the SSA name representing RESULT_DECL by variable during
273 inlining: this saves us from need to introduce PHI node in a case
274 return value is just partly initialized. */
275 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
276 && (!SSA_NAME_VAR (name)
277 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
278 || !id->transform_return_to_modify))
279 {
280 struct ptr_info_def *pi;
281 new_tree = make_ssa_name (new_tree);
282 insert_decl_map (id, name, new_tree);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id->src_cfun->gimple_df
287 && id->src_cfun->gimple_df->ipa_pta
288 && POINTER_TYPE_P (TREE_TYPE (name))
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 {
297 /* By inlining function having uninitialized variable, we might
298 extend the lifetime (variable might get reused). This cause
299 ICE in the case we end up extending lifetime of SSA name across
300 abnormal edge, but also increase register pressure.
301
302 We simply initialize all uninitialized vars by 0 except
303 for case we are inlining to very first BB. We can avoid
304 this for all BBs that are not inside strongly connected
305 regions of the CFG, but this is expensive to test. */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 {
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple *init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 }
322 else
323 {
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 }
327 }
328 }
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
332 }
333
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
335
336 tree
337 remap_decl (tree decl, copy_body_data *id)
338 {
339 tree *n;
340
341 /* We only remap local variables in the current function. */
342
343 /* See if we have remapped this declaration. */
344
345 n = id->decl_map->get (decl);
346
347 if (!n && processing_debug_stmt)
348 {
349 processing_debug_stmt = -1;
350 return decl;
351 }
352
353 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 necessary DECLs have already been remapped and we do not want to duplicate
355 a decl coming from outside of the sequence we are copying. */
356 if (!n
357 && id->prevent_decl_creation_for_types
358 && id->remapping_type_depth > 0
359 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
360 return decl;
361
362 /* If we didn't already have an equivalent for this declaration, create one
363 now. */
364 if (!n)
365 {
366 /* Make a copy of the variable or label. */
367 tree t = id->copy_decl (decl, id);
368
369 /* Remember it, so that if we encounter this local entity again
370 we can reuse this copy. Do this early because remap_type may
371 need this decl for TYPE_STUB_DECL. */
372 insert_decl_map (id, decl, t);
373
374 if (!DECL_P (t))
375 return t;
376
377 /* Remap types, if necessary. */
378 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
379 if (TREE_CODE (t) == TYPE_DECL)
380 {
381 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
382
383 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 is not set on the TYPE_DECL, for example in LTO mode. */
386 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
387 {
388 tree x = build_variant_type_copy (TREE_TYPE (t));
389 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
390 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
391 DECL_ORIGINAL_TYPE (t) = x;
392 }
393 }
394
395 /* Remap sizes as necessary. */
396 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
397 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
398
399 /* If fields, do likewise for offset and qualifier. */
400 if (TREE_CODE (t) == FIELD_DECL)
401 {
402 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
403 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
404 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
405 }
406
407 return t;
408 }
409
410 if (id->do_not_unshare)
411 return *n;
412 else
413 return unshare_expr (*n);
414 }
415
416 static tree
417 remap_type_1 (tree type, copy_body_data *id)
418 {
419 tree new_tree, t;
420
421 /* We do need a copy. build and register it now. If this is a pointer or
422 reference type, remap the designated type and make a new pointer or
423 reference type. */
424 if (TREE_CODE (type) == POINTER_TYPE)
425 {
426 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
427 TYPE_MODE (type),
428 TYPE_REF_CAN_ALIAS_ALL (type));
429 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
430 new_tree = build_type_attribute_qual_variant (new_tree,
431 TYPE_ATTRIBUTES (type),
432 TYPE_QUALS (type));
433 insert_decl_map (id, type, new_tree);
434 return new_tree;
435 }
436 else if (TREE_CODE (type) == REFERENCE_TYPE)
437 {
438 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
439 TYPE_MODE (type),
440 TYPE_REF_CAN_ALIAS_ALL (type));
441 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
442 new_tree = build_type_attribute_qual_variant (new_tree,
443 TYPE_ATTRIBUTES (type),
444 TYPE_QUALS (type));
445 insert_decl_map (id, type, new_tree);
446 return new_tree;
447 }
448 else
449 new_tree = copy_node (type);
450
451 insert_decl_map (id, type, new_tree);
452
453 /* This is a new type, not a copy of an old type. Need to reassociate
454 variants. We can handle everything except the main variant lazily. */
455 t = TYPE_MAIN_VARIANT (type);
456 if (type != t)
457 {
458 t = remap_type (t, id);
459 TYPE_MAIN_VARIANT (new_tree) = t;
460 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
461 TYPE_NEXT_VARIANT (t) = new_tree;
462 }
463 else
464 {
465 TYPE_MAIN_VARIANT (new_tree) = new_tree;
466 TYPE_NEXT_VARIANT (new_tree) = NULL;
467 }
468
469 if (TYPE_STUB_DECL (type))
470 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
471
472 /* Lazily create pointer and reference types. */
473 TYPE_POINTER_TO (new_tree) = NULL;
474 TYPE_REFERENCE_TO (new_tree) = NULL;
475
476 /* Copy all types that may contain references to local variables; be sure to
477 preserve sharing in between type and its main variant when possible. */
478 switch (TREE_CODE (new_tree))
479 {
480 case INTEGER_TYPE:
481 case REAL_TYPE:
482 case FIXED_POINT_TYPE:
483 case ENUMERAL_TYPE:
484 case BOOLEAN_TYPE:
485 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
486 {
487 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
488 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
489
490 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
492 }
493 else
494 {
495 t = TYPE_MIN_VALUE (new_tree);
496 if (t && TREE_CODE (t) != INTEGER_CST)
497 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
498
499 t = TYPE_MAX_VALUE (new_tree);
500 if (t && TREE_CODE (t) != INTEGER_CST)
501 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
502 }
503 return new_tree;
504
505 case FUNCTION_TYPE:
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
508 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
509 else
510 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
513 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
514 else
515 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
516 return new_tree;
517
518 case ARRAY_TYPE:
519 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
520 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
521 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
522 else
523 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
524
525 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
526 {
527 gcc_checking_assert (TYPE_DOMAIN (type)
528 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
529 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
530 }
531 else
532 {
533 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
534 /* For array bounds where we have decided not to copy over the bounds
535 variable which isn't used in OpenMP/OpenACC region, change them to
536 an uninitialized VAR_DECL temporary. */
537 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
538 && id->adjust_array_error_bounds
539 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
540 {
541 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
542 DECL_ATTRIBUTES (v)
543 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
544 DECL_ATTRIBUTES (v));
545 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
546 }
547 }
548 break;
549
550 case RECORD_TYPE:
551 case UNION_TYPE:
552 case QUAL_UNION_TYPE:
553 if (TYPE_MAIN_VARIANT (type) != type
554 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
555 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
556 else
557 {
558 tree f, nf = NULL;
559
560 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
561 {
562 t = remap_decl (f, id);
563 DECL_CONTEXT (t) = new_tree;
564 DECL_CHAIN (t) = nf;
565 nf = t;
566 }
567 TYPE_FIELDS (new_tree) = nreverse (nf);
568 }
569 break;
570
571 case OFFSET_TYPE:
572 default:
573 /* Shouldn't have been thought variable sized. */
574 gcc_unreachable ();
575 }
576
577 /* All variants of type share the same size, so use the already remaped data. */
578 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
579 {
580 tree s = TYPE_SIZE (type);
581 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
582 tree su = TYPE_SIZE_UNIT (type);
583 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
584 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
585 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
586 || s == mvs);
587 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
589 || su == mvsu);
590 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
591 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
592 }
593 else
594 {
595 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
596 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
597 }
598
599 return new_tree;
600 }
601
602 /* Helper function for remap_type_2, called through walk_tree. */
603
604 static tree
605 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
606 {
607 copy_body_data *id = (copy_body_data *) data;
608
609 if (TYPE_P (*tp))
610 *walk_subtrees = 0;
611
612 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
613 return *tp;
614
615 return NULL_TREE;
616 }
617
618 /* Return true if TYPE needs to be remapped because remap_decl on any
619 needed embedded decl returns something other than that decl. */
620
621 static bool
622 remap_type_2 (tree type, copy_body_data *id)
623 {
624 tree t;
625
626 #define RETURN_TRUE_IF_VAR(T) \
627 do \
628 { \
629 tree _t = (T); \
630 if (_t) \
631 { \
632 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
633 return true; \
634 if (!TYPE_SIZES_GIMPLIFIED (type) \
635 && walk_tree (&_t, remap_type_3, id, NULL)) \
636 return true; \
637 } \
638 } \
639 while (0)
640
641 switch (TREE_CODE (type))
642 {
643 case POINTER_TYPE:
644 case REFERENCE_TYPE:
645 case FUNCTION_TYPE:
646 case METHOD_TYPE:
647 return remap_type_2 (TREE_TYPE (type), id);
648
649 case INTEGER_TYPE:
650 case REAL_TYPE:
651 case FIXED_POINT_TYPE:
652 case ENUMERAL_TYPE:
653 case BOOLEAN_TYPE:
654 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
655 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
656 return false;
657
658 case ARRAY_TYPE:
659 if (remap_type_2 (TREE_TYPE (type), id)
660 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
661 return true;
662 break;
663
664 case RECORD_TYPE:
665 case UNION_TYPE:
666 case QUAL_UNION_TYPE:
667 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
668 if (TREE_CODE (t) == FIELD_DECL)
669 {
670 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
671 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
672 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
673 if (TREE_CODE (type) == QUAL_UNION_TYPE)
674 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
675 }
676 break;
677
678 default:
679 return false;
680 }
681
682 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
683 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
684 return false;
685 #undef RETURN_TRUE_IF_VAR
686 }
687
688 tree
689 remap_type (tree type, copy_body_data *id)
690 {
691 tree *node;
692 tree tmp;
693
694 if (type == NULL)
695 return type;
696
697 /* See if we have remapped this type. */
698 node = id->decl_map->get (type);
699 if (node)
700 return *node;
701
702 /* The type only needs remapping if it's variably modified. */
703 if (! variably_modified_type_p (type, id->src_fn)
704 /* Don't remap if copy_decl method doesn't always return a new
705 decl and for all embedded decls returns the passed in decl. */
706 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
707 {
708 insert_decl_map (id, type, type);
709 return type;
710 }
711
712 id->remapping_type_depth++;
713 tmp = remap_type_1 (type, id);
714 id->remapping_type_depth--;
715
716 return tmp;
717 }
718
719 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
720
721 static bool
722 can_be_nonlocal (tree decl, copy_body_data *id)
723 {
724 /* We cannot duplicate function decls. */
725 if (TREE_CODE (decl) == FUNCTION_DECL)
726 return true;
727
728 /* Local static vars must be non-local or we get multiple declaration
729 problems. */
730 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
731 return true;
732
733 return false;
734 }
735
736 static tree
737 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
738 copy_body_data *id)
739 {
740 tree old_var;
741 tree new_decls = NULL_TREE;
742
743 /* Remap its variables. */
744 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
745 {
746 tree new_var;
747
748 if (can_be_nonlocal (old_var, id))
749 {
750 /* We need to add this variable to the local decls as otherwise
751 nothing else will do so. */
752 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
753 add_local_decl (cfun, old_var);
754 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
755 && !DECL_IGNORED_P (old_var)
756 && nonlocalized_list)
757 vec_safe_push (*nonlocalized_list, old_var);
758 continue;
759 }
760
761 /* Remap the variable. */
762 new_var = remap_decl (old_var, id);
763
764 /* If we didn't remap this variable, we can't mess with its
765 TREE_CHAIN. If we remapped this variable to the return slot, it's
766 already declared somewhere else, so don't declare it here. */
767
768 if (new_var == id->retvar)
769 ;
770 else if (!new_var)
771 {
772 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
773 && !DECL_IGNORED_P (old_var)
774 && nonlocalized_list)
775 vec_safe_push (*nonlocalized_list, old_var);
776 }
777 else
778 {
779 gcc_assert (DECL_P (new_var));
780 DECL_CHAIN (new_var) = new_decls;
781 new_decls = new_var;
782
783 /* Also copy value-expressions. */
784 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
785 {
786 tree tem = DECL_VALUE_EXPR (new_var);
787 bool old_regimplify = id->regimplify;
788 id->remapping_type_depth++;
789 walk_tree (&tem, copy_tree_body_r, id, NULL);
790 id->remapping_type_depth--;
791 id->regimplify = old_regimplify;
792 SET_DECL_VALUE_EXPR (new_var, tem);
793 }
794 }
795 }
796
797 return nreverse (new_decls);
798 }
799
800 /* Copy the BLOCK to contain remapped versions of the variables
801 therein. And hook the new block into the block-tree. */
802
803 static void
804 remap_block (tree *block, copy_body_data *id)
805 {
806 tree old_block;
807 tree new_block;
808
809 /* Make the new block. */
810 old_block = *block;
811 new_block = make_node (BLOCK);
812 TREE_USED (new_block) = TREE_USED (old_block);
813 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
814 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
815 BLOCK_NONLOCALIZED_VARS (new_block)
816 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
817 *block = new_block;
818
819 /* Remap its variables. */
820 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
821 &BLOCK_NONLOCALIZED_VARS (new_block),
822 id);
823
824 if (id->transform_lang_insert_block)
825 id->transform_lang_insert_block (new_block);
826
827 /* Remember the remapped block. */
828 insert_decl_map (id, old_block, new_block);
829 }
830
831 /* Copy the whole block tree and root it in id->block. */
832
833 static tree
834 remap_blocks (tree block, copy_body_data *id)
835 {
836 tree t;
837 tree new_tree = block;
838
839 if (!block)
840 return NULL;
841
842 remap_block (&new_tree, id);
843 gcc_assert (new_tree != block);
844 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
845 prepend_lexical_block (new_tree, remap_blocks (t, id));
846 /* Blocks are in arbitrary order, but make things slightly prettier and do
847 not swap order when producing a copy. */
848 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
849 return new_tree;
850 }
851
852 /* Remap the block tree rooted at BLOCK to nothing. */
853
854 static void
855 remap_blocks_to_null (tree block, copy_body_data *id)
856 {
857 tree t;
858 insert_decl_map (id, block, NULL_TREE);
859 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
860 remap_blocks_to_null (t, id);
861 }
862
863 /* Remap the location info pointed to by LOCUS. */
864
865 static location_t
866 remap_location (location_t locus, copy_body_data *id)
867 {
868 if (LOCATION_BLOCK (locus))
869 {
870 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
871 gcc_assert (n);
872 if (*n)
873 return set_block (locus, *n);
874 }
875
876 locus = LOCATION_LOCUS (locus);
877
878 if (locus != UNKNOWN_LOCATION && id->block)
879 return set_block (locus, id->block);
880
881 return locus;
882 }
883
884 static void
885 copy_statement_list (tree *tp)
886 {
887 tree_stmt_iterator oi, ni;
888 tree new_tree;
889
890 new_tree = alloc_stmt_list ();
891 ni = tsi_start (new_tree);
892 oi = tsi_start (*tp);
893 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
894 *tp = new_tree;
895
896 for (; !tsi_end_p (oi); tsi_next (&oi))
897 {
898 tree stmt = tsi_stmt (oi);
899 if (TREE_CODE (stmt) == STATEMENT_LIST)
900 /* This copy is not redundant; tsi_link_after will smash this
901 STATEMENT_LIST into the end of the one we're building, and we
902 don't want to do that with the original. */
903 copy_statement_list (&stmt);
904 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
905 }
906 }
907
908 static void
909 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
910 {
911 tree block = BIND_EXPR_BLOCK (*tp);
912 /* Copy (and replace) the statement. */
913 copy_tree_r (tp, walk_subtrees, NULL);
914 if (block)
915 {
916 remap_block (&block, id);
917 BIND_EXPR_BLOCK (*tp) = block;
918 }
919
920 if (BIND_EXPR_VARS (*tp))
921 /* This will remap a lot of the same decls again, but this should be
922 harmless. */
923 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
924 }
925
926
927 /* Create a new gimple_seq by remapping all the statements in BODY
928 using the inlining information in ID. */
929
930 static gimple_seq
931 remap_gimple_seq (gimple_seq body, copy_body_data *id)
932 {
933 gimple_stmt_iterator si;
934 gimple_seq new_body = NULL;
935
936 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
937 {
938 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
939 gimple_seq_add_seq (&new_body, new_stmts);
940 }
941
942 return new_body;
943 }
944
945
946 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
947 block using the mapping information in ID. */
948
949 static gimple *
950 copy_gimple_bind (gbind *stmt, copy_body_data *id)
951 {
952 gimple *new_bind;
953 tree new_block, new_vars;
954 gimple_seq body, new_body;
955
956 /* Copy the statement. Note that we purposely don't use copy_stmt
957 here because we need to remap statements as we copy. */
958 body = gimple_bind_body (stmt);
959 new_body = remap_gimple_seq (body, id);
960
961 new_block = gimple_bind_block (stmt);
962 if (new_block)
963 remap_block (&new_block, id);
964
965 /* This will remap a lot of the same decls again, but this should be
966 harmless. */
967 new_vars = gimple_bind_vars (stmt);
968 if (new_vars)
969 new_vars = remap_decls (new_vars, NULL, id);
970
971 new_bind = gimple_build_bind (new_vars, new_body, new_block);
972
973 return new_bind;
974 }
975
976 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
977
978 static bool
979 is_parm (tree decl)
980 {
981 if (TREE_CODE (decl) == SSA_NAME)
982 {
983 decl = SSA_NAME_VAR (decl);
984 if (!decl)
985 return false;
986 }
987
988 return (TREE_CODE (decl) == PARM_DECL);
989 }
990
991 /* Remap the dependence CLIQUE from the source to the destination function
992 as specified in ID. */
993
994 static unsigned short
995 remap_dependence_clique (copy_body_data *id, unsigned short clique)
996 {
997 if (clique == 0 || processing_debug_stmt)
998 return 0;
999 if (!id->dependence_map)
1000 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1001 bool existed;
1002 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1003 if (!existed)
1004 {
1005 /* Clique 1 is reserved for local ones set by PTA. */
1006 if (cfun->last_clique == 0)
1007 cfun->last_clique = 1;
1008 newc = ++cfun->last_clique;
1009 }
1010 return newc;
1011 }
1012
1013 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1014 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1015 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1016 recursing into the children nodes of *TP. */
1017
1018 static tree
1019 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1020 {
1021 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1022 copy_body_data *id = (copy_body_data *) wi_p->info;
1023 tree fn = id->src_fn;
1024
1025 /* For recursive invocations this is no longer the LHS itself. */
1026 bool is_lhs = wi_p->is_lhs;
1027 wi_p->is_lhs = false;
1028
1029 if (TREE_CODE (*tp) == SSA_NAME)
1030 {
1031 *tp = remap_ssa_name (*tp, id);
1032 *walk_subtrees = 0;
1033 if (is_lhs)
1034 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1035 return NULL;
1036 }
1037 else if (auto_var_in_fn_p (*tp, fn))
1038 {
1039 /* Local variables and labels need to be replaced by equivalent
1040 variables. We don't want to copy static variables; there's
1041 only one of those, no matter how many times we inline the
1042 containing function. Similarly for globals from an outer
1043 function. */
1044 tree new_decl;
1045
1046 /* Remap the declaration. */
1047 new_decl = remap_decl (*tp, id);
1048 gcc_assert (new_decl);
1049 /* Replace this variable with the copy. */
1050 STRIP_TYPE_NOPS (new_decl);
1051 /* ??? The C++ frontend uses void * pointer zero to initialize
1052 any other type. This confuses the middle-end type verification.
1053 As cloned bodies do not go through gimplification again the fixup
1054 there doesn't trigger. */
1055 if (TREE_CODE (new_decl) == INTEGER_CST
1056 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1057 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1058 *tp = new_decl;
1059 *walk_subtrees = 0;
1060 }
1061 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1062 gcc_unreachable ();
1063 else if (TREE_CODE (*tp) == SAVE_EXPR)
1064 gcc_unreachable ();
1065 else if (TREE_CODE (*tp) == LABEL_DECL
1066 && (!DECL_CONTEXT (*tp)
1067 || decl_function_context (*tp) == id->src_fn))
1068 /* These may need to be remapped for EH handling. */
1069 *tp = remap_decl (*tp, id);
1070 else if (TREE_CODE (*tp) == FIELD_DECL)
1071 {
1072 /* If the enclosing record type is variably_modified_type_p, the field
1073 has already been remapped. Otherwise, it need not be. */
1074 tree *n = id->decl_map->get (*tp);
1075 if (n)
1076 *tp = *n;
1077 *walk_subtrees = 0;
1078 }
1079 else if (TYPE_P (*tp))
1080 /* Types may need remapping as well. */
1081 *tp = remap_type (*tp, id);
1082 else if (CONSTANT_CLASS_P (*tp))
1083 {
1084 /* If this is a constant, we have to copy the node iff the type
1085 will be remapped. copy_tree_r will not copy a constant. */
1086 tree new_type = remap_type (TREE_TYPE (*tp), id);
1087
1088 if (new_type == TREE_TYPE (*tp))
1089 *walk_subtrees = 0;
1090
1091 else if (TREE_CODE (*tp) == INTEGER_CST)
1092 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1093 else
1094 {
1095 *tp = copy_node (*tp);
1096 TREE_TYPE (*tp) = new_type;
1097 }
1098 }
1099 else
1100 {
1101 /* Otherwise, just copy the node. Note that copy_tree_r already
1102 knows not to copy VAR_DECLs, etc., so this is safe. */
1103
1104 if (TREE_CODE (*tp) == MEM_REF)
1105 {
1106 /* We need to re-canonicalize MEM_REFs from inline substitutions
1107 that can happen when a pointer argument is an ADDR_EXPR.
1108 Recurse here manually to allow that. */
1109 tree ptr = TREE_OPERAND (*tp, 0);
1110 tree type = remap_type (TREE_TYPE (*tp), id);
1111 tree old = *tp;
1112 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1113 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1114 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1115 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1116 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1117 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1118 {
1119 MR_DEPENDENCE_CLIQUE (*tp)
1120 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1121 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1122 }
1123 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1124 remapped a parameter as the property might be valid only
1125 for the parameter itself. */
1126 if (TREE_THIS_NOTRAP (old)
1127 && (!is_parm (TREE_OPERAND (old, 0))
1128 || (!id->transform_parameter && is_parm (ptr))))
1129 TREE_THIS_NOTRAP (*tp) = 1;
1130 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1131 *walk_subtrees = 0;
1132 return NULL;
1133 }
1134
1135 /* Here is the "usual case". Copy this tree node, and then
1136 tweak some special cases. */
1137 copy_tree_r (tp, walk_subtrees, NULL);
1138
1139 if (TREE_CODE (*tp) != OMP_CLAUSE)
1140 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1141
1142 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1143 {
1144 /* The copied TARGET_EXPR has never been expanded, even if the
1145 original node was expanded already. */
1146 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1147 TREE_OPERAND (*tp, 3) = NULL_TREE;
1148 }
1149 else if (TREE_CODE (*tp) == ADDR_EXPR)
1150 {
1151 /* Variable substitution need not be simple. In particular,
1152 the MEM_REF substitution above. Make sure that
1153 TREE_CONSTANT and friends are up-to-date. */
1154 int invariant = is_gimple_min_invariant (*tp);
1155 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1156 recompute_tree_invariant_for_addr_expr (*tp);
1157
1158 /* If this used to be invariant, but is not any longer,
1159 then regimplification is probably needed. */
1160 if (invariant && !is_gimple_min_invariant (*tp))
1161 id->regimplify = true;
1162
1163 *walk_subtrees = 0;
1164 }
1165 }
1166
1167 /* Update the TREE_BLOCK for the cloned expr. */
1168 if (EXPR_P (*tp))
1169 {
1170 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1171 tree old_block = TREE_BLOCK (*tp);
1172 if (old_block)
1173 {
1174 tree *n;
1175 n = id->decl_map->get (TREE_BLOCK (*tp));
1176 if (n)
1177 new_block = *n;
1178 }
1179 TREE_SET_BLOCK (*tp, new_block);
1180 }
1181
1182 /* Keep iterating. */
1183 return NULL_TREE;
1184 }
1185
1186
1187 /* Called from copy_body_id via walk_tree. DATA is really a
1188 `copy_body_data *'. */
1189
1190 tree
1191 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1192 {
1193 copy_body_data *id = (copy_body_data *) data;
1194 tree fn = id->src_fn;
1195 tree new_block;
1196
1197 /* Begin by recognizing trees that we'll completely rewrite for the
1198 inlining context. Our output for these trees is completely
1199 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1200 into an edge). Further down, we'll handle trees that get
1201 duplicated and/or tweaked. */
1202
1203 /* When requested, RETURN_EXPRs should be transformed to just the
1204 contained MODIFY_EXPR. The branch semantics of the return will
1205 be handled elsewhere by manipulating the CFG rather than a statement. */
1206 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1207 {
1208 tree assignment = TREE_OPERAND (*tp, 0);
1209
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If the "assignment" is just the result decl, the result
1213 decl has already been set (e.g. a recent "foo (&result_decl,
1214 ...)"); just toss the entire RETURN_EXPR. */
1215 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1216 {
1217 /* Replace the RETURN_EXPR with (a copy of) the
1218 MODIFY_EXPR hanging underneath. */
1219 *tp = copy_node (assignment);
1220 }
1221 else /* Else the RETURN_EXPR returns no value. */
1222 {
1223 *tp = NULL;
1224 return (tree) (void *)1;
1225 }
1226 }
1227 else if (TREE_CODE (*tp) == SSA_NAME)
1228 {
1229 *tp = remap_ssa_name (*tp, id);
1230 *walk_subtrees = 0;
1231 return NULL;
1232 }
1233
1234 /* Local variables and labels need to be replaced by equivalent
1235 variables. We don't want to copy static variables; there's only
1236 one of those, no matter how many times we inline the containing
1237 function. Similarly for globals from an outer function. */
1238 else if (auto_var_in_fn_p (*tp, fn))
1239 {
1240 tree new_decl;
1241
1242 /* Remap the declaration. */
1243 new_decl = remap_decl (*tp, id);
1244 gcc_assert (new_decl);
1245 /* Replace this variable with the copy. */
1246 STRIP_TYPE_NOPS (new_decl);
1247 *tp = new_decl;
1248 *walk_subtrees = 0;
1249 }
1250 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1251 copy_statement_list (tp);
1252 else if (TREE_CODE (*tp) == SAVE_EXPR
1253 || TREE_CODE (*tp) == TARGET_EXPR)
1254 remap_save_expr (tp, id->decl_map, walk_subtrees);
1255 else if (TREE_CODE (*tp) == LABEL_DECL
1256 && (! DECL_CONTEXT (*tp)
1257 || decl_function_context (*tp) == id->src_fn))
1258 /* These may need to be remapped for EH handling. */
1259 *tp = remap_decl (*tp, id);
1260 else if (TREE_CODE (*tp) == BIND_EXPR)
1261 copy_bind_expr (tp, walk_subtrees, id);
1262 /* Types may need remapping as well. */
1263 else if (TYPE_P (*tp))
1264 *tp = remap_type (*tp, id);
1265
1266 /* If this is a constant, we have to copy the node iff the type will be
1267 remapped. copy_tree_r will not copy a constant. */
1268 else if (CONSTANT_CLASS_P (*tp))
1269 {
1270 tree new_type = remap_type (TREE_TYPE (*tp), id);
1271
1272 if (new_type == TREE_TYPE (*tp))
1273 *walk_subtrees = 0;
1274
1275 else if (TREE_CODE (*tp) == INTEGER_CST)
1276 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1277 else
1278 {
1279 *tp = copy_node (*tp);
1280 TREE_TYPE (*tp) = new_type;
1281 }
1282 }
1283
1284 /* Otherwise, just copy the node. Note that copy_tree_r already
1285 knows not to copy VAR_DECLs, etc., so this is safe. */
1286 else
1287 {
1288 /* Here we handle trees that are not completely rewritten.
1289 First we detect some inlining-induced bogosities for
1290 discarding. */
1291 if (TREE_CODE (*tp) == MODIFY_EXPR
1292 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1293 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1294 {
1295 /* Some assignments VAR = VAR; don't generate any rtl code
1296 and thus don't count as variable modification. Avoid
1297 keeping bogosities like 0 = 0. */
1298 tree decl = TREE_OPERAND (*tp, 0), value;
1299 tree *n;
1300
1301 n = id->decl_map->get (decl);
1302 if (n)
1303 {
1304 value = *n;
1305 STRIP_TYPE_NOPS (value);
1306 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1307 {
1308 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1309 return copy_tree_body_r (tp, walk_subtrees, data);
1310 }
1311 }
1312 }
1313 else if (TREE_CODE (*tp) == INDIRECT_REF)
1314 {
1315 /* Get rid of *& from inline substitutions that can happen when a
1316 pointer argument is an ADDR_EXPR. */
1317 tree decl = TREE_OPERAND (*tp, 0);
1318 tree *n = id->decl_map->get (decl);
1319 if (n)
1320 {
1321 /* If we happen to get an ADDR_EXPR in n->value, strip
1322 it manually here as we'll eventually get ADDR_EXPRs
1323 which lie about their types pointed to. In this case
1324 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1325 but we absolutely rely on that. As fold_indirect_ref
1326 does other useful transformations, try that first, though. */
1327 tree type = TREE_TYPE (*tp);
1328 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1329 tree old = *tp;
1330 *tp = gimple_fold_indirect_ref (ptr);
1331 if (! *tp)
1332 {
1333 type = remap_type (type, id);
1334 if (TREE_CODE (ptr) == ADDR_EXPR)
1335 {
1336 *tp
1337 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1338 /* ??? We should either assert here or build
1339 a VIEW_CONVERT_EXPR instead of blindly leaking
1340 incompatible types to our IL. */
1341 if (! *tp)
1342 *tp = TREE_OPERAND (ptr, 0);
1343 }
1344 else
1345 {
1346 *tp = build1 (INDIRECT_REF, type, ptr);
1347 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1348 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1349 TREE_READONLY (*tp) = TREE_READONLY (old);
1350 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1351 have remapped a parameter as the property might be
1352 valid only for the parameter itself. */
1353 if (TREE_THIS_NOTRAP (old)
1354 && (!is_parm (TREE_OPERAND (old, 0))
1355 || (!id->transform_parameter && is_parm (ptr))))
1356 TREE_THIS_NOTRAP (*tp) = 1;
1357 }
1358 }
1359 *walk_subtrees = 0;
1360 return NULL;
1361 }
1362 }
1363 else if (TREE_CODE (*tp) == MEM_REF)
1364 {
1365 /* We need to re-canonicalize MEM_REFs from inline substitutions
1366 that can happen when a pointer argument is an ADDR_EXPR.
1367 Recurse here manually to allow that. */
1368 tree ptr = TREE_OPERAND (*tp, 0);
1369 tree type = remap_type (TREE_TYPE (*tp), id);
1370 tree old = *tp;
1371 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1372 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1373 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1374 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1375 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1376 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1377 {
1378 MR_DEPENDENCE_CLIQUE (*tp)
1379 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1380 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1381 }
1382 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1383 remapped a parameter as the property might be valid only
1384 for the parameter itself. */
1385 if (TREE_THIS_NOTRAP (old)
1386 && (!is_parm (TREE_OPERAND (old, 0))
1387 || (!id->transform_parameter && is_parm (ptr))))
1388 TREE_THIS_NOTRAP (*tp) = 1;
1389 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1390 *walk_subtrees = 0;
1391 return NULL;
1392 }
1393
1394 /* Here is the "usual case". Copy this tree node, and then
1395 tweak some special cases. */
1396 copy_tree_r (tp, walk_subtrees, NULL);
1397
1398 /* If EXPR has block defined, map it to newly constructed block.
1399 When inlining we want EXPRs without block appear in the block
1400 of function call if we are not remapping a type. */
1401 if (EXPR_P (*tp))
1402 {
1403 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1404 if (TREE_BLOCK (*tp))
1405 {
1406 tree *n;
1407 n = id->decl_map->get (TREE_BLOCK (*tp));
1408 if (n)
1409 new_block = *n;
1410 }
1411 TREE_SET_BLOCK (*tp, new_block);
1412 }
1413
1414 if (TREE_CODE (*tp) != OMP_CLAUSE)
1415 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1416
1417 /* The copied TARGET_EXPR has never been expanded, even if the
1418 original node was expanded already. */
1419 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1420 {
1421 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1422 TREE_OPERAND (*tp, 3) = NULL_TREE;
1423 }
1424
1425 /* Variable substitution need not be simple. In particular, the
1426 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1427 and friends are up-to-date. */
1428 else if (TREE_CODE (*tp) == ADDR_EXPR)
1429 {
1430 int invariant = is_gimple_min_invariant (*tp);
1431 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1432
1433 /* Handle the case where we substituted an INDIRECT_REF
1434 into the operand of the ADDR_EXPR. */
1435 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1436 {
1437 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1438 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1439 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1440 *tp = t;
1441 }
1442 else
1443 recompute_tree_invariant_for_addr_expr (*tp);
1444
1445 /* If this used to be invariant, but is not any longer,
1446 then regimplification is probably needed. */
1447 if (invariant && !is_gimple_min_invariant (*tp))
1448 id->regimplify = true;
1449
1450 *walk_subtrees = 0;
1451 }
1452 }
1453
1454 /* Keep iterating. */
1455 return NULL_TREE;
1456 }
1457
1458 /* Helper for remap_gimple_stmt. Given an EH region number for the
1459 source function, map that to the duplicate EH region number in
1460 the destination function. */
1461
1462 static int
1463 remap_eh_region_nr (int old_nr, copy_body_data *id)
1464 {
1465 eh_region old_r, new_r;
1466
1467 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1468 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1469
1470 return new_r->index;
1471 }
1472
1473 /* Similar, but operate on INTEGER_CSTs. */
1474
1475 static tree
1476 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1477 {
1478 int old_nr, new_nr;
1479
1480 old_nr = tree_to_shwi (old_t_nr);
1481 new_nr = remap_eh_region_nr (old_nr, id);
1482
1483 return build_int_cst (integer_type_node, new_nr);
1484 }
1485
1486 /* Helper for copy_bb. Remap statement STMT using the inlining
1487 information in ID. Return the new statement copy. */
1488
1489 static gimple_seq
1490 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1491 {
1492 gimple *copy = NULL;
1493 struct walk_stmt_info wi;
1494 bool skip_first = false;
1495 gimple_seq stmts = NULL;
1496
1497 if (is_gimple_debug (stmt)
1498 && (gimple_debug_nonbind_marker_p (stmt)
1499 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1500 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1501 return NULL;
1502
1503 /* Begin by recognizing trees that we'll completely rewrite for the
1504 inlining context. Our output for these trees is completely
1505 different from our input (e.g. RETURN_EXPR is deleted and morphs
1506 into an edge). Further down, we'll handle trees that get
1507 duplicated and/or tweaked. */
1508
1509 /* When requested, GIMPLE_RETURN should be transformed to just the
1510 contained GIMPLE_ASSIGN. The branch semantics of the return will
1511 be handled elsewhere by manipulating the CFG rather than the
1512 statement. */
1513 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1514 {
1515 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1516
1517 /* If we're returning something, just turn that into an
1518 assignment to the equivalent of the original RESULT_DECL.
1519 If RETVAL is just the result decl, the result decl has
1520 already been set (e.g. a recent "foo (&result_decl, ...)");
1521 just toss the entire GIMPLE_RETURN. */
1522 if (retval
1523 && (TREE_CODE (retval) != RESULT_DECL
1524 && (TREE_CODE (retval) != SSA_NAME
1525 || ! SSA_NAME_VAR (retval)
1526 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1527 {
1528 copy = gimple_build_assign (id->do_not_unshare
1529 ? id->retvar : unshare_expr (id->retvar),
1530 retval);
1531 /* id->retvar is already substituted. Skip it on later remapping. */
1532 skip_first = true;
1533 }
1534 else
1535 return NULL;
1536 }
1537 else if (gimple_has_substatements (stmt))
1538 {
1539 gimple_seq s1, s2;
1540
1541 /* When cloning bodies from the C++ front end, we will be handed bodies
1542 in High GIMPLE form. Handle here all the High GIMPLE statements that
1543 have embedded statements. */
1544 switch (gimple_code (stmt))
1545 {
1546 case GIMPLE_BIND:
1547 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1548 break;
1549
1550 case GIMPLE_CATCH:
1551 {
1552 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1553 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1554 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1555 }
1556 break;
1557
1558 case GIMPLE_EH_FILTER:
1559 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1560 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1561 break;
1562
1563 case GIMPLE_TRY:
1564 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1565 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1566 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1567 break;
1568
1569 case GIMPLE_WITH_CLEANUP_EXPR:
1570 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1571 copy = gimple_build_wce (s1);
1572 break;
1573
1574 case GIMPLE_OMP_PARALLEL:
1575 {
1576 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1577 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1578 copy = gimple_build_omp_parallel
1579 (s1,
1580 gimple_omp_parallel_clauses (omp_par_stmt),
1581 gimple_omp_parallel_child_fn (omp_par_stmt),
1582 gimple_omp_parallel_data_arg (omp_par_stmt));
1583 }
1584 break;
1585
1586 case GIMPLE_OMP_TASK:
1587 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1588 copy = gimple_build_omp_task
1589 (s1,
1590 gimple_omp_task_clauses (stmt),
1591 gimple_omp_task_child_fn (stmt),
1592 gimple_omp_task_data_arg (stmt),
1593 gimple_omp_task_copy_fn (stmt),
1594 gimple_omp_task_arg_size (stmt),
1595 gimple_omp_task_arg_align (stmt));
1596 break;
1597
1598 case GIMPLE_OMP_FOR:
1599 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1600 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1601 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1602 gimple_omp_for_clauses (stmt),
1603 gimple_omp_for_collapse (stmt), s2);
1604 {
1605 size_t i;
1606 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1607 {
1608 gimple_omp_for_set_index (copy, i,
1609 gimple_omp_for_index (stmt, i));
1610 gimple_omp_for_set_initial (copy, i,
1611 gimple_omp_for_initial (stmt, i));
1612 gimple_omp_for_set_final (copy, i,
1613 gimple_omp_for_final (stmt, i));
1614 gimple_omp_for_set_incr (copy, i,
1615 gimple_omp_for_incr (stmt, i));
1616 gimple_omp_for_set_cond (copy, i,
1617 gimple_omp_for_cond (stmt, i));
1618 }
1619 }
1620 break;
1621
1622 case GIMPLE_OMP_MASTER:
1623 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1624 copy = gimple_build_omp_master (s1);
1625 break;
1626
1627 case GIMPLE_OMP_TASKGROUP:
1628 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1629 copy = gimple_build_omp_taskgroup
1630 (s1, gimple_omp_taskgroup_clauses (stmt));
1631 break;
1632
1633 case GIMPLE_OMP_ORDERED:
1634 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1635 copy = gimple_build_omp_ordered
1636 (s1,
1637 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1638 break;
1639
1640 case GIMPLE_OMP_SECTION:
1641 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1642 copy = gimple_build_omp_section (s1);
1643 break;
1644
1645 case GIMPLE_OMP_SECTIONS:
1646 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1647 copy = gimple_build_omp_sections
1648 (s1, gimple_omp_sections_clauses (stmt));
1649 break;
1650
1651 case GIMPLE_OMP_SINGLE:
1652 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1653 copy = gimple_build_omp_single
1654 (s1, gimple_omp_single_clauses (stmt));
1655 break;
1656
1657 case GIMPLE_OMP_TARGET:
1658 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1659 copy = gimple_build_omp_target
1660 (s1, gimple_omp_target_kind (stmt),
1661 gimple_omp_target_clauses (stmt));
1662 break;
1663
1664 case GIMPLE_OMP_TEAMS:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_teams
1667 (s1, gimple_omp_teams_clauses (stmt));
1668 break;
1669
1670 case GIMPLE_OMP_CRITICAL:
1671 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1672 copy = gimple_build_omp_critical (s1,
1673 gimple_omp_critical_name
1674 (as_a <gomp_critical *> (stmt)),
1675 gimple_omp_critical_clauses
1676 (as_a <gomp_critical *> (stmt)));
1677 break;
1678
1679 case GIMPLE_TRANSACTION:
1680 {
1681 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1682 gtransaction *new_trans_stmt;
1683 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1684 id);
1685 copy = new_trans_stmt = gimple_build_transaction (s1);
1686 gimple_transaction_set_subcode (new_trans_stmt,
1687 gimple_transaction_subcode (old_trans_stmt));
1688 gimple_transaction_set_label_norm (new_trans_stmt,
1689 gimple_transaction_label_norm (old_trans_stmt));
1690 gimple_transaction_set_label_uninst (new_trans_stmt,
1691 gimple_transaction_label_uninst (old_trans_stmt));
1692 gimple_transaction_set_label_over (new_trans_stmt,
1693 gimple_transaction_label_over (old_trans_stmt));
1694 }
1695 break;
1696
1697 default:
1698 gcc_unreachable ();
1699 }
1700 }
1701 else
1702 {
1703 if (gimple_assign_copy_p (stmt)
1704 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1705 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1706 {
1707 /* Here we handle statements that are not completely rewritten.
1708 First we detect some inlining-induced bogosities for
1709 discarding. */
1710
1711 /* Some assignments VAR = VAR; don't generate any rtl code
1712 and thus don't count as variable modification. Avoid
1713 keeping bogosities like 0 = 0. */
1714 tree decl = gimple_assign_lhs (stmt), value;
1715 tree *n;
1716
1717 n = id->decl_map->get (decl);
1718 if (n)
1719 {
1720 value = *n;
1721 STRIP_TYPE_NOPS (value);
1722 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1723 return NULL;
1724 }
1725 }
1726
1727 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1728 in a block that we aren't copying during tree_function_versioning,
1729 just drop the clobber stmt. */
1730 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1731 {
1732 tree lhs = gimple_assign_lhs (stmt);
1733 if (TREE_CODE (lhs) == MEM_REF
1734 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1735 {
1736 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1737 if (gimple_bb (def_stmt)
1738 && !bitmap_bit_p (id->blocks_to_copy,
1739 gimple_bb (def_stmt)->index))
1740 return NULL;
1741 }
1742 }
1743
1744 if (gimple_debug_bind_p (stmt))
1745 {
1746 gdebug *copy
1747 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1748 gimple_debug_bind_get_value (stmt),
1749 stmt);
1750 if (id->reset_location)
1751 gimple_set_location (copy, input_location);
1752 id->debug_stmts.safe_push (copy);
1753 gimple_seq_add_stmt (&stmts, copy);
1754 return stmts;
1755 }
1756 if (gimple_debug_source_bind_p (stmt))
1757 {
1758 gdebug *copy = gimple_build_debug_source_bind
1759 (gimple_debug_source_bind_get_var (stmt),
1760 gimple_debug_source_bind_get_value (stmt),
1761 stmt);
1762 if (id->reset_location)
1763 gimple_set_location (copy, input_location);
1764 id->debug_stmts.safe_push (copy);
1765 gimple_seq_add_stmt (&stmts, copy);
1766 return stmts;
1767 }
1768 if (gimple_debug_nonbind_marker_p (stmt))
1769 {
1770 /* If the inlined function has too many debug markers,
1771 don't copy them. */
1772 if (id->src_cfun->debug_marker_count
1773 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1774 return stmts;
1775
1776 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1777 if (id->reset_location)
1778 gimple_set_location (copy, input_location);
1779 id->debug_stmts.safe_push (copy);
1780 gimple_seq_add_stmt (&stmts, copy);
1781 return stmts;
1782 }
1783
1784 /* Create a new deep copy of the statement. */
1785 copy = gimple_copy (stmt);
1786
1787 /* Clear flags that need revisiting. */
1788 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1789 {
1790 if (gimple_call_tail_p (call_stmt))
1791 gimple_call_set_tail (call_stmt, false);
1792 if (gimple_call_from_thunk_p (call_stmt))
1793 gimple_call_set_from_thunk (call_stmt, false);
1794 if (gimple_call_internal_p (call_stmt))
1795 switch (gimple_call_internal_fn (call_stmt))
1796 {
1797 case IFN_GOMP_SIMD_LANE:
1798 case IFN_GOMP_SIMD_VF:
1799 case IFN_GOMP_SIMD_LAST_LANE:
1800 case IFN_GOMP_SIMD_ORDERED_START:
1801 case IFN_GOMP_SIMD_ORDERED_END:
1802 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1803 break;
1804 default:
1805 break;
1806 }
1807 }
1808
1809 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1810 RESX and EH_DISPATCH. */
1811 if (id->eh_map)
1812 switch (gimple_code (copy))
1813 {
1814 case GIMPLE_CALL:
1815 {
1816 tree r, fndecl = gimple_call_fndecl (copy);
1817 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1818 switch (DECL_FUNCTION_CODE (fndecl))
1819 {
1820 case BUILT_IN_EH_COPY_VALUES:
1821 r = gimple_call_arg (copy, 1);
1822 r = remap_eh_region_tree_nr (r, id);
1823 gimple_call_set_arg (copy, 1, r);
1824 /* FALLTHRU */
1825
1826 case BUILT_IN_EH_POINTER:
1827 case BUILT_IN_EH_FILTER:
1828 r = gimple_call_arg (copy, 0);
1829 r = remap_eh_region_tree_nr (r, id);
1830 gimple_call_set_arg (copy, 0, r);
1831 break;
1832
1833 default:
1834 break;
1835 }
1836
1837 /* Reset alias info if we didn't apply measures to
1838 keep it valid over inlining by setting DECL_PT_UID. */
1839 if (!id->src_cfun->gimple_df
1840 || !id->src_cfun->gimple_df->ipa_pta)
1841 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1842 }
1843 break;
1844
1845 case GIMPLE_RESX:
1846 {
1847 gresx *resx_stmt = as_a <gresx *> (copy);
1848 int r = gimple_resx_region (resx_stmt);
1849 r = remap_eh_region_nr (r, id);
1850 gimple_resx_set_region (resx_stmt, r);
1851 }
1852 break;
1853
1854 case GIMPLE_EH_DISPATCH:
1855 {
1856 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1857 int r = gimple_eh_dispatch_region (eh_dispatch);
1858 r = remap_eh_region_nr (r, id);
1859 gimple_eh_dispatch_set_region (eh_dispatch, r);
1860 }
1861 break;
1862
1863 default:
1864 break;
1865 }
1866 }
1867
1868 /* If STMT has a block defined, map it to the newly constructed block. */
1869 if (tree block = gimple_block (copy))
1870 {
1871 tree *n;
1872 n = id->decl_map->get (block);
1873 gcc_assert (n);
1874 gimple_set_block (copy, *n);
1875 }
1876
1877 if (id->reset_location)
1878 gimple_set_location (copy, input_location);
1879
1880 /* Debug statements ought to be rebuilt and not copied. */
1881 gcc_checking_assert (!is_gimple_debug (copy));
1882
1883 /* Remap all the operands in COPY. */
1884 memset (&wi, 0, sizeof (wi));
1885 wi.info = id;
1886 if (skip_first)
1887 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1888 else
1889 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1890
1891 /* Clear the copied virtual operands. We are not remapping them here
1892 but are going to recreate them from scratch. */
1893 if (gimple_has_mem_ops (copy))
1894 {
1895 gimple_set_vdef (copy, NULL_TREE);
1896 gimple_set_vuse (copy, NULL_TREE);
1897 }
1898
1899 gimple_seq_add_stmt (&stmts, copy);
1900 return stmts;
1901 }
1902
1903
1904 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1905 later */
1906
1907 static basic_block
1908 copy_bb (copy_body_data *id, basic_block bb,
1909 profile_count num, profile_count den)
1910 {
1911 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1912 basic_block copy_basic_block;
1913 tree decl;
1914 basic_block prev;
1915
1916 profile_count::adjust_for_ipa_scaling (&num, &den);
1917
1918 /* Search for previous copied basic block. */
1919 prev = bb->prev_bb;
1920 while (!prev->aux)
1921 prev = prev->prev_bb;
1922
1923 /* create_basic_block() will append every new block to
1924 basic_block_info automatically. */
1925 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1926 copy_basic_block->count = bb->count.apply_scale (num, den);
1927
1928 copy_gsi = gsi_start_bb (copy_basic_block);
1929
1930 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1931 {
1932 gimple_seq stmts;
1933 gimple *stmt = gsi_stmt (gsi);
1934 gimple *orig_stmt = stmt;
1935 gimple_stmt_iterator stmts_gsi;
1936 bool stmt_added = false;
1937
1938 id->regimplify = false;
1939 stmts = remap_gimple_stmt (stmt, id);
1940
1941 if (gimple_seq_empty_p (stmts))
1942 continue;
1943
1944 seq_gsi = copy_gsi;
1945
1946 for (stmts_gsi = gsi_start (stmts);
1947 !gsi_end_p (stmts_gsi); )
1948 {
1949 stmt = gsi_stmt (stmts_gsi);
1950
1951 /* Advance iterator now before stmt is moved to seq_gsi. */
1952 gsi_next (&stmts_gsi);
1953
1954 if (gimple_nop_p (stmt))
1955 continue;
1956
1957 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1958 orig_stmt);
1959
1960 /* With return slot optimization we can end up with
1961 non-gimple (foo *)&this->m, fix that here. */
1962 if (is_gimple_assign (stmt)
1963 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1964 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1965 {
1966 tree new_rhs;
1967 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1968 gimple_assign_rhs1 (stmt),
1969 true, NULL, false,
1970 GSI_CONTINUE_LINKING);
1971 gimple_assign_set_rhs1 (stmt, new_rhs);
1972 id->regimplify = false;
1973 }
1974
1975 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1976
1977 if (id->regimplify)
1978 gimple_regimplify_operands (stmt, &seq_gsi);
1979
1980 stmt_added = true;
1981 }
1982
1983 if (!stmt_added)
1984 continue;
1985
1986 /* If copy_basic_block has been empty at the start of this iteration,
1987 call gsi_start_bb again to get at the newly added statements. */
1988 if (gsi_end_p (copy_gsi))
1989 copy_gsi = gsi_start_bb (copy_basic_block);
1990 else
1991 gsi_next (&copy_gsi);
1992
1993 /* Process the new statement. The call to gimple_regimplify_operands
1994 possibly turned the statement into multiple statements, we
1995 need to process all of them. */
1996 do
1997 {
1998 tree fn;
1999 gcall *call_stmt;
2000
2001 stmt = gsi_stmt (copy_gsi);
2002 call_stmt = dyn_cast <gcall *> (stmt);
2003 if (call_stmt
2004 && gimple_call_va_arg_pack_p (call_stmt)
2005 && id->call_stmt
2006 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2007 {
2008 /* __builtin_va_arg_pack () should be replaced by
2009 all arguments corresponding to ... in the caller. */
2010 tree p;
2011 gcall *new_call;
2012 vec<tree> argarray;
2013 size_t nargs = gimple_call_num_args (id->call_stmt);
2014 size_t n;
2015
2016 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2017 nargs--;
2018
2019 /* Create the new array of arguments. */
2020 n = nargs + gimple_call_num_args (call_stmt);
2021 argarray.create (n);
2022 argarray.safe_grow_cleared (n);
2023
2024 /* Copy all the arguments before '...' */
2025 memcpy (argarray.address (),
2026 gimple_call_arg_ptr (call_stmt, 0),
2027 gimple_call_num_args (call_stmt) * sizeof (tree));
2028
2029 /* Append the arguments passed in '...' */
2030 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2031 gimple_call_arg_ptr (id->call_stmt, 0)
2032 + (gimple_call_num_args (id->call_stmt) - nargs),
2033 nargs * sizeof (tree));
2034
2035 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2036 argarray);
2037
2038 argarray.release ();
2039
2040 /* Copy all GIMPLE_CALL flags, location and block, except
2041 GF_CALL_VA_ARG_PACK. */
2042 gimple_call_copy_flags (new_call, call_stmt);
2043 gimple_call_set_va_arg_pack (new_call, false);
2044 /* location includes block. */
2045 gimple_set_location (new_call, gimple_location (stmt));
2046 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2047
2048 gsi_replace (&copy_gsi, new_call, false);
2049 stmt = new_call;
2050 }
2051 else if (call_stmt
2052 && id->call_stmt
2053 && (decl = gimple_call_fndecl (stmt))
2054 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2055 {
2056 /* __builtin_va_arg_pack_len () should be replaced by
2057 the number of anonymous arguments. */
2058 size_t nargs = gimple_call_num_args (id->call_stmt);
2059 tree count, p;
2060 gimple *new_stmt;
2061
2062 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2063 nargs--;
2064
2065 if (!gimple_call_lhs (stmt))
2066 {
2067 /* Drop unused calls. */
2068 gsi_remove (&copy_gsi, false);
2069 continue;
2070 }
2071 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2072 {
2073 count = build_int_cst (integer_type_node, nargs);
2074 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2075 gsi_replace (&copy_gsi, new_stmt, false);
2076 stmt = new_stmt;
2077 }
2078 else if (nargs != 0)
2079 {
2080 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2081 count = build_int_cst (integer_type_node, nargs);
2082 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2083 PLUS_EXPR, newlhs, count);
2084 gimple_call_set_lhs (stmt, newlhs);
2085 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2086 }
2087 }
2088 else if (call_stmt
2089 && id->call_stmt
2090 && gimple_call_internal_p (stmt)
2091 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2092 {
2093 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2094 gsi_remove (&copy_gsi, false);
2095 continue;
2096 }
2097
2098 /* Statements produced by inlining can be unfolded, especially
2099 when we constant propagated some operands. We can't fold
2100 them right now for two reasons:
2101 1) folding require SSA_NAME_DEF_STMTs to be correct
2102 2) we can't change function calls to builtins.
2103 So we just mark statement for later folding. We mark
2104 all new statements, instead just statements that has changed
2105 by some nontrivial substitution so even statements made
2106 foldable indirectly are updated. If this turns out to be
2107 expensive, copy_body can be told to watch for nontrivial
2108 changes. */
2109 if (id->statements_to_fold)
2110 id->statements_to_fold->add (stmt);
2111
2112 /* We're duplicating a CALL_EXPR. Find any corresponding
2113 callgraph edges and update or duplicate them. */
2114 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2115 {
2116 struct cgraph_edge *edge;
2117
2118 switch (id->transform_call_graph_edges)
2119 {
2120 case CB_CGE_DUPLICATE:
2121 edge = id->src_node->get_edge (orig_stmt);
2122 if (edge)
2123 {
2124 struct cgraph_edge *old_edge = edge;
2125 profile_count old_cnt = edge->count;
2126 edge = edge->clone (id->dst_node, call_stmt,
2127 gimple_uid (stmt),
2128 num, den,
2129 true);
2130
2131 /* Speculative calls consist of two edges - direct and
2132 indirect. Duplicate the whole thing and distribute
2133 frequencies accordingly. */
2134 if (edge->speculative)
2135 {
2136 struct cgraph_edge *direct, *indirect;
2137 struct ipa_ref *ref;
2138
2139 gcc_assert (!edge->indirect_unknown_callee);
2140 old_edge->speculative_call_info (direct, indirect, ref);
2141
2142 profile_count indir_cnt = indirect->count;
2143 indirect = indirect->clone (id->dst_node, call_stmt,
2144 gimple_uid (stmt),
2145 num, den,
2146 true);
2147
2148 profile_probability prob
2149 = indir_cnt.probability_in (old_cnt + indir_cnt);
2150 indirect->count
2151 = copy_basic_block->count.apply_probability (prob);
2152 edge->count = copy_basic_block->count - indirect->count;
2153 id->dst_node->clone_reference (ref, stmt);
2154 }
2155 else
2156 edge->count = copy_basic_block->count;
2157 }
2158 break;
2159
2160 case CB_CGE_MOVE_CLONES:
2161 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2162 call_stmt);
2163 edge = id->dst_node->get_edge (stmt);
2164 break;
2165
2166 case CB_CGE_MOVE:
2167 edge = id->dst_node->get_edge (orig_stmt);
2168 if (edge)
2169 edge->set_call_stmt (call_stmt);
2170 break;
2171
2172 default:
2173 gcc_unreachable ();
2174 }
2175
2176 /* Constant propagation on argument done during inlining
2177 may create new direct call. Produce an edge for it. */
2178 if ((!edge
2179 || (edge->indirect_inlining_edge
2180 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2181 && id->dst_node->definition
2182 && (fn = gimple_call_fndecl (stmt)) != NULL)
2183 {
2184 struct cgraph_node *dest = cgraph_node::get_create (fn);
2185
2186 /* We have missing edge in the callgraph. This can happen
2187 when previous inlining turned an indirect call into a
2188 direct call by constant propagating arguments or we are
2189 producing dead clone (for further cloning). In all
2190 other cases we hit a bug (incorrect node sharing is the
2191 most common reason for missing edges). */
2192 gcc_assert (!dest->definition
2193 || dest->address_taken
2194 || !id->src_node->definition
2195 || !id->dst_node->definition);
2196 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2197 id->dst_node->create_edge_including_clones
2198 (dest, orig_stmt, call_stmt, bb->count,
2199 CIF_ORIGINALLY_INDIRECT_CALL);
2200 else
2201 id->dst_node->create_edge (dest, call_stmt,
2202 bb->count)->inline_failed
2203 = CIF_ORIGINALLY_INDIRECT_CALL;
2204 if (dump_file)
2205 {
2206 fprintf (dump_file, "Created new direct edge to %s\n",
2207 dest->name ());
2208 }
2209 }
2210
2211 notice_special_calls (as_a <gcall *> (stmt));
2212 }
2213
2214 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2215 id->eh_map, id->eh_lp_nr);
2216
2217 gsi_next (&copy_gsi);
2218 }
2219 while (!gsi_end_p (copy_gsi));
2220
2221 copy_gsi = gsi_last_bb (copy_basic_block);
2222 }
2223
2224 return copy_basic_block;
2225 }
2226
2227 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2228 form is quite easy, since dominator relationship for old basic blocks does
2229 not change.
2230
2231 There is however exception where inlining might change dominator relation
2232 across EH edges from basic block within inlined functions destinating
2233 to landing pads in function we inline into.
2234
2235 The function fills in PHI_RESULTs of such PHI nodes if they refer
2236 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2237 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2238 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2239 set, and this means that there will be no overlapping live ranges
2240 for the underlying symbol.
2241
2242 This might change in future if we allow redirecting of EH edges and
2243 we might want to change way build CFG pre-inlining to include
2244 all the possible edges then. */
2245 static void
2246 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2247 bool can_throw, bool nonlocal_goto)
2248 {
2249 edge e;
2250 edge_iterator ei;
2251
2252 FOR_EACH_EDGE (e, ei, bb->succs)
2253 if (!e->dest->aux
2254 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2255 {
2256 gphi *phi;
2257 gphi_iterator si;
2258
2259 if (!nonlocal_goto)
2260 gcc_assert (e->flags & EDGE_EH);
2261
2262 if (!can_throw)
2263 gcc_assert (!(e->flags & EDGE_EH));
2264
2265 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2266 {
2267 edge re;
2268
2269 phi = si.phi ();
2270
2271 /* For abnormal goto/call edges the receiver can be the
2272 ENTRY_BLOCK. Do not assert this cannot happen. */
2273
2274 gcc_assert ((e->flags & EDGE_EH)
2275 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2276
2277 re = find_edge (ret_bb, e->dest);
2278 gcc_checking_assert (re);
2279 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2280 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2281
2282 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2283 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2284 }
2285 }
2286 }
2287
2288 /* Insert clobbers for automatic variables of inlined ID->src_fn
2289 function at the start of basic block ID->eh_landing_pad_dest. */
2290
2291 static void
2292 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2293 {
2294 tree var;
2295 basic_block bb = id->eh_landing_pad_dest;
2296 live_vars_map *vars = NULL;
2297 unsigned int cnt = 0;
2298 unsigned int i;
2299 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2300 if (VAR_P (var)
2301 && !DECL_HARD_REGISTER (var)
2302 && !TREE_THIS_VOLATILE (var)
2303 && !DECL_HAS_VALUE_EXPR_P (var)
2304 && !is_gimple_reg (var)
2305 && auto_var_in_fn_p (var, id->src_fn)
2306 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2307 {
2308 tree *t = id->decl_map->get (var);
2309 if (!t)
2310 continue;
2311 tree new_var = *t;
2312 if (VAR_P (new_var)
2313 && !DECL_HARD_REGISTER (new_var)
2314 && !TREE_THIS_VOLATILE (new_var)
2315 && !DECL_HAS_VALUE_EXPR_P (new_var)
2316 && !is_gimple_reg (new_var)
2317 && auto_var_in_fn_p (new_var, id->dst_fn))
2318 {
2319 if (vars == NULL)
2320 vars = new live_vars_map;
2321 vars->put (DECL_UID (var), cnt++);
2322 }
2323 }
2324 if (vars == NULL)
2325 return;
2326
2327 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2328 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2329 if (VAR_P (var))
2330 {
2331 edge e;
2332 edge_iterator ei;
2333 bool needed = false;
2334 unsigned int *v = vars->get (DECL_UID (var));
2335 if (v == NULL)
2336 continue;
2337 FOR_EACH_EDGE (e, ei, bb->preds)
2338 if ((e->flags & EDGE_EH) != 0
2339 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2340 {
2341 basic_block src_bb = (basic_block) e->src->aux;
2342
2343 if (bitmap_bit_p (&live[src_bb->index], *v))
2344 {
2345 needed = true;
2346 break;
2347 }
2348 }
2349 if (needed)
2350 {
2351 tree new_var = *id->decl_map->get (var);
2352 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2353 tree clobber = build_clobber (TREE_TYPE (new_var));
2354 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2355 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2356 }
2357 }
2358 destroy_live_vars (live);
2359 delete vars;
2360 }
2361
2362 /* Copy edges from BB into its copy constructed earlier, scale profile
2363 accordingly. Edges will be taken care of later. Assume aux
2364 pointers to point to the copies of each BB. Return true if any
2365 debug stmts are left after a statement that must end the basic block. */
2366
2367 static bool
2368 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2369 basic_block ret_bb, basic_block abnormal_goto_dest,
2370 copy_body_data *id)
2371 {
2372 basic_block new_bb = (basic_block) bb->aux;
2373 edge_iterator ei;
2374 edge old_edge;
2375 gimple_stmt_iterator si;
2376 bool need_debug_cleanup = false;
2377
2378 /* Use the indices from the original blocks to create edges for the
2379 new ones. */
2380 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2381 if (!(old_edge->flags & EDGE_EH))
2382 {
2383 edge new_edge;
2384 int flags = old_edge->flags;
2385 location_t locus = old_edge->goto_locus;
2386
2387 /* Return edges do get a FALLTHRU flag when they get inlined. */
2388 if (old_edge->dest->index == EXIT_BLOCK
2389 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2390 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2391 flags |= EDGE_FALLTHRU;
2392
2393 new_edge
2394 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2395 new_edge->probability = old_edge->probability;
2396 if (!id->reset_location)
2397 new_edge->goto_locus = remap_location (locus, id);
2398 }
2399
2400 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2401 return false;
2402
2403 /* When doing function splitting, we must decrease count of the return block
2404 which was previously reachable by block we did not copy. */
2405 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2406 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2407 if (old_edge->src->index != ENTRY_BLOCK
2408 && !old_edge->src->aux)
2409 new_bb->count -= old_edge->count ().apply_scale (num, den);
2410
2411 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2412 {
2413 gimple *copy_stmt;
2414 bool can_throw, nonlocal_goto;
2415
2416 copy_stmt = gsi_stmt (si);
2417 if (!is_gimple_debug (copy_stmt))
2418 update_stmt (copy_stmt);
2419
2420 /* Do this before the possible split_block. */
2421 gsi_next (&si);
2422
2423 /* If this tree could throw an exception, there are two
2424 cases where we need to add abnormal edge(s): the
2425 tree wasn't in a region and there is a "current
2426 region" in the caller; or the original tree had
2427 EH edges. In both cases split the block after the tree,
2428 and add abnormal edge(s) as needed; we need both
2429 those from the callee and the caller.
2430 We check whether the copy can throw, because the const
2431 propagation can change an INDIRECT_REF which throws
2432 into a COMPONENT_REF which doesn't. If the copy
2433 can throw, the original could also throw. */
2434 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2435 nonlocal_goto
2436 = (stmt_can_make_abnormal_goto (copy_stmt)
2437 && !computed_goto_p (copy_stmt));
2438
2439 if (can_throw || nonlocal_goto)
2440 {
2441 if (!gsi_end_p (si))
2442 {
2443 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2444 gsi_next (&si);
2445 if (gsi_end_p (si))
2446 need_debug_cleanup = true;
2447 }
2448 if (!gsi_end_p (si))
2449 /* Note that bb's predecessor edges aren't necessarily
2450 right at this point; split_block doesn't care. */
2451 {
2452 edge e = split_block (new_bb, copy_stmt);
2453
2454 new_bb = e->dest;
2455 new_bb->aux = e->src->aux;
2456 si = gsi_start_bb (new_bb);
2457 }
2458 }
2459
2460 bool update_probs = false;
2461
2462 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2463 {
2464 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2465 update_probs = true;
2466 }
2467 else if (can_throw)
2468 {
2469 make_eh_edges (copy_stmt);
2470 update_probs = true;
2471 }
2472
2473 /* EH edges may not match old edges. Copy as much as possible. */
2474 if (update_probs)
2475 {
2476 edge e;
2477 edge_iterator ei;
2478 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2479
2480 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2481 if ((old_edge->flags & EDGE_EH)
2482 && (e = find_edge (copy_stmt_bb,
2483 (basic_block) old_edge->dest->aux))
2484 && (e->flags & EDGE_EH))
2485 e->probability = old_edge->probability;
2486
2487 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2488 if (e->flags & EDGE_EH)
2489 {
2490 if (!e->probability.initialized_p ())
2491 e->probability = profile_probability::never ();
2492 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2493 {
2494 if (id->eh_landing_pad_dest == NULL)
2495 id->eh_landing_pad_dest = e->dest;
2496 else
2497 gcc_assert (id->eh_landing_pad_dest == e->dest);
2498 }
2499 }
2500 }
2501
2502
2503 /* If the call we inline cannot make abnormal goto do not add
2504 additional abnormal edges but only retain those already present
2505 in the original function body. */
2506 if (abnormal_goto_dest == NULL)
2507 nonlocal_goto = false;
2508 if (nonlocal_goto)
2509 {
2510 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2511
2512 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2513 nonlocal_goto = false;
2514 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2515 in OpenMP regions which aren't allowed to be left abnormally.
2516 So, no need to add abnormal edge in that case. */
2517 else if (is_gimple_call (copy_stmt)
2518 && gimple_call_internal_p (copy_stmt)
2519 && (gimple_call_internal_fn (copy_stmt)
2520 == IFN_ABNORMAL_DISPATCHER)
2521 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2522 nonlocal_goto = false;
2523 else
2524 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2525 EDGE_ABNORMAL);
2526 }
2527
2528 if ((can_throw || nonlocal_goto)
2529 && gimple_in_ssa_p (cfun))
2530 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2531 can_throw, nonlocal_goto);
2532 }
2533 return need_debug_cleanup;
2534 }
2535
2536 /* Copy the PHIs. All blocks and edges are copied, some blocks
2537 was possibly split and new outgoing EH edges inserted.
2538 BB points to the block of original function and AUX pointers links
2539 the original and newly copied blocks. */
2540
2541 static void
2542 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2543 {
2544 basic_block const new_bb = (basic_block) bb->aux;
2545 edge_iterator ei;
2546 gphi *phi;
2547 gphi_iterator si;
2548 edge new_edge;
2549 bool inserted = false;
2550
2551 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2552 {
2553 tree res, new_res;
2554 gphi *new_phi;
2555
2556 phi = si.phi ();
2557 res = PHI_RESULT (phi);
2558 new_res = res;
2559 if (!virtual_operand_p (res))
2560 {
2561 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2562 if (EDGE_COUNT (new_bb->preds) == 0)
2563 {
2564 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2565 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2566 }
2567 else
2568 {
2569 new_phi = create_phi_node (new_res, new_bb);
2570 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2571 {
2572 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2573 bb);
2574 tree arg;
2575 tree new_arg;
2576 edge_iterator ei2;
2577 location_t locus;
2578
2579 /* When doing partial cloning, we allow PHIs on the entry
2580 block as long as all the arguments are the same.
2581 Find any input edge to see argument to copy. */
2582 if (!old_edge)
2583 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2584 if (!old_edge->src->aux)
2585 break;
2586
2587 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2588 new_arg = arg;
2589 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2590 gcc_assert (new_arg);
2591 /* With return slot optimization we can end up with
2592 non-gimple (foo *)&this->m, fix that here. */
2593 if (TREE_CODE (new_arg) != SSA_NAME
2594 && TREE_CODE (new_arg) != FUNCTION_DECL
2595 && !is_gimple_val (new_arg))
2596 {
2597 gimple_seq stmts = NULL;
2598 new_arg = force_gimple_operand (new_arg, &stmts, true,
2599 NULL);
2600 gsi_insert_seq_on_edge (new_edge, stmts);
2601 inserted = true;
2602 }
2603 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2604 if (id->reset_location)
2605 locus = input_location;
2606 else
2607 locus = remap_location (locus, id);
2608 add_phi_arg (new_phi, new_arg, new_edge, locus);
2609 }
2610 }
2611 }
2612 }
2613
2614 /* Commit the delayed edge insertions. */
2615 if (inserted)
2616 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2617 gsi_commit_one_edge_insert (new_edge, NULL);
2618 }
2619
2620
2621 /* Wrapper for remap_decl so it can be used as a callback. */
2622
2623 static tree
2624 remap_decl_1 (tree decl, void *data)
2625 {
2626 return remap_decl (decl, (copy_body_data *) data);
2627 }
2628
2629 /* Build struct function and associated datastructures for the new clone
2630 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2631 the cfun to the function of new_fndecl (and current_function_decl too). */
2632
2633 static void
2634 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2635 {
2636 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2637
2638 if (!DECL_ARGUMENTS (new_fndecl))
2639 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2640 if (!DECL_RESULT (new_fndecl))
2641 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2642
2643 /* Register specific tree functions. */
2644 gimple_register_cfg_hooks ();
2645
2646 /* Get clean struct function. */
2647 push_struct_function (new_fndecl);
2648
2649 /* We will rebuild these, so just sanity check that they are empty. */
2650 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2651 gcc_assert (cfun->local_decls == NULL);
2652 gcc_assert (cfun->cfg == NULL);
2653 gcc_assert (cfun->decl == new_fndecl);
2654
2655 /* Copy items we preserve during cloning. */
2656 cfun->static_chain_decl = src_cfun->static_chain_decl;
2657 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2658 cfun->function_end_locus = src_cfun->function_end_locus;
2659 cfun->curr_properties = src_cfun->curr_properties;
2660 cfun->last_verified = src_cfun->last_verified;
2661 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2662 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2663 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2664 cfun->stdarg = src_cfun->stdarg;
2665 cfun->after_inlining = src_cfun->after_inlining;
2666 cfun->can_throw_non_call_exceptions
2667 = src_cfun->can_throw_non_call_exceptions;
2668 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2669 cfun->returns_struct = src_cfun->returns_struct;
2670 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2671
2672 init_empty_tree_cfg ();
2673
2674 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2675
2676 profile_count num = count;
2677 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2678 profile_count::adjust_for_ipa_scaling (&num, &den);
2679
2680 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2681 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2682 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2683 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2684 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2685 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2686 if (src_cfun->eh)
2687 init_eh_for_function ();
2688
2689 if (src_cfun->gimple_df)
2690 {
2691 init_tree_ssa (cfun);
2692 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2693 if (cfun->gimple_df->in_ssa_p)
2694 init_ssa_operands (cfun);
2695 }
2696 }
2697
2698 /* Helper function for copy_cfg_body. Move debug stmts from the end
2699 of NEW_BB to the beginning of successor basic blocks when needed. If the
2700 successor has multiple predecessors, reset them, otherwise keep
2701 their value. */
2702
2703 static void
2704 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2705 {
2706 edge e;
2707 edge_iterator ei;
2708 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2709
2710 if (gsi_end_p (si)
2711 || gsi_one_before_end_p (si)
2712 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2713 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2714 return;
2715
2716 FOR_EACH_EDGE (e, ei, new_bb->succs)
2717 {
2718 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2719 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2720 while (is_gimple_debug (gsi_stmt (ssi)))
2721 {
2722 gimple *stmt = gsi_stmt (ssi);
2723 gdebug *new_stmt;
2724 tree var;
2725 tree value;
2726
2727 /* For the last edge move the debug stmts instead of copying
2728 them. */
2729 if (ei_one_before_end_p (ei))
2730 {
2731 si = ssi;
2732 gsi_prev (&ssi);
2733 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2734 {
2735 gimple_debug_bind_reset_value (stmt);
2736 gimple_set_location (stmt, UNKNOWN_LOCATION);
2737 }
2738 gsi_remove (&si, false);
2739 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2740 continue;
2741 }
2742
2743 if (gimple_debug_bind_p (stmt))
2744 {
2745 var = gimple_debug_bind_get_var (stmt);
2746 if (single_pred_p (e->dest))
2747 {
2748 value = gimple_debug_bind_get_value (stmt);
2749 value = unshare_expr (value);
2750 new_stmt = gimple_build_debug_bind (var, value, stmt);
2751 }
2752 else
2753 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2754 }
2755 else if (gimple_debug_source_bind_p (stmt))
2756 {
2757 var = gimple_debug_source_bind_get_var (stmt);
2758 value = gimple_debug_source_bind_get_value (stmt);
2759 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2760 }
2761 else if (gimple_debug_nonbind_marker_p (stmt))
2762 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2763 else
2764 gcc_unreachable ();
2765 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2766 id->debug_stmts.safe_push (new_stmt);
2767 gsi_prev (&ssi);
2768 }
2769 }
2770 }
2771
2772 /* Make a copy of the sub-loops of SRC_PARENT and place them
2773 as siblings of DEST_PARENT. */
2774
2775 static void
2776 copy_loops (copy_body_data *id,
2777 struct loop *dest_parent, struct loop *src_parent)
2778 {
2779 struct loop *src_loop = src_parent->inner;
2780 while (src_loop)
2781 {
2782 if (!id->blocks_to_copy
2783 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2784 {
2785 struct loop *dest_loop = alloc_loop ();
2786
2787 /* Assign the new loop its header and latch and associate
2788 those with the new loop. */
2789 dest_loop->header = (basic_block)src_loop->header->aux;
2790 dest_loop->header->loop_father = dest_loop;
2791 if (src_loop->latch != NULL)
2792 {
2793 dest_loop->latch = (basic_block)src_loop->latch->aux;
2794 dest_loop->latch->loop_father = dest_loop;
2795 }
2796
2797 /* Copy loop meta-data. */
2798 copy_loop_info (src_loop, dest_loop);
2799 if (dest_loop->unroll)
2800 cfun->has_unroll = true;
2801 if (dest_loop->force_vectorize)
2802 cfun->has_force_vectorize_loops = true;
2803 if (id->src_cfun->last_clique != 0)
2804 dest_loop->owned_clique
2805 = remap_dependence_clique (id,
2806 src_loop->owned_clique
2807 ? src_loop->owned_clique : 1);
2808
2809 /* Finally place it into the loop array and the loop tree. */
2810 place_new_loop (cfun, dest_loop);
2811 flow_loop_tree_node_add (dest_parent, dest_loop);
2812
2813 if (src_loop->simduid)
2814 {
2815 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2816 cfun->has_simduid_loops = true;
2817 }
2818
2819 /* Recurse. */
2820 copy_loops (id, dest_loop, src_loop);
2821 }
2822 src_loop = src_loop->next;
2823 }
2824 }
2825
2826 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2827
2828 void
2829 redirect_all_calls (copy_body_data * id, basic_block bb)
2830 {
2831 gimple_stmt_iterator si;
2832 gimple *last = last_stmt (bb);
2833 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2834 {
2835 gimple *stmt = gsi_stmt (si);
2836 if (is_gimple_call (stmt))
2837 {
2838 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2839 if (edge)
2840 {
2841 edge->redirect_call_stmt_to_callee ();
2842 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2843 gimple_purge_dead_eh_edges (bb);
2844 }
2845 }
2846 }
2847 }
2848
2849 /* Make a copy of the body of FN so that it can be inserted inline in
2850 another function. Walks FN via CFG, returns new fndecl. */
2851
2852 static tree
2853 copy_cfg_body (copy_body_data * id,
2854 basic_block entry_block_map, basic_block exit_block_map,
2855 basic_block new_entry)
2856 {
2857 tree callee_fndecl = id->src_fn;
2858 /* Original cfun for the callee, doesn't change. */
2859 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2860 struct function *cfun_to_copy;
2861 basic_block bb;
2862 tree new_fndecl = NULL;
2863 bool need_debug_cleanup = false;
2864 int last;
2865 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2866 profile_count num = entry_block_map->count;
2867
2868 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2869
2870 /* Register specific tree functions. */
2871 gimple_register_cfg_hooks ();
2872
2873 /* If we are inlining just region of the function, make sure to connect
2874 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2875 part of loop, we must compute frequency and probability of
2876 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2877 probabilities of edges incoming from nonduplicated region. */
2878 if (new_entry)
2879 {
2880 edge e;
2881 edge_iterator ei;
2882 den = profile_count::zero ();
2883
2884 FOR_EACH_EDGE (e, ei, new_entry->preds)
2885 if (!e->src->aux)
2886 den += e->count ();
2887 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2888 }
2889
2890 profile_count::adjust_for_ipa_scaling (&num, &den);
2891
2892 /* Must have a CFG here at this point. */
2893 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2894 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2895
2896
2897 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2898 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2899 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2900 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2901
2902 /* Duplicate any exception-handling regions. */
2903 if (cfun->eh)
2904 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2905 remap_decl_1, id);
2906
2907 /* Use aux pointers to map the original blocks to copy. */
2908 FOR_EACH_BB_FN (bb, cfun_to_copy)
2909 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2910 {
2911 basic_block new_bb = copy_bb (id, bb, num, den);
2912 bb->aux = new_bb;
2913 new_bb->aux = bb;
2914 new_bb->loop_father = entry_block_map->loop_father;
2915 }
2916
2917 last = last_basic_block_for_fn (cfun);
2918
2919 /* Now that we've duplicated the blocks, duplicate their edges. */
2920 basic_block abnormal_goto_dest = NULL;
2921 if (id->call_stmt
2922 && stmt_can_make_abnormal_goto (id->call_stmt))
2923 {
2924 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2925
2926 bb = gimple_bb (id->call_stmt);
2927 gsi_next (&gsi);
2928 if (gsi_end_p (gsi))
2929 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2930 }
2931 FOR_ALL_BB_FN (bb, cfun_to_copy)
2932 if (!id->blocks_to_copy
2933 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2934 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2935 abnormal_goto_dest, id);
2936
2937 if (id->eh_landing_pad_dest)
2938 {
2939 add_clobbers_to_eh_landing_pad (id);
2940 id->eh_landing_pad_dest = NULL;
2941 }
2942
2943 if (new_entry)
2944 {
2945 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2946 EDGE_FALLTHRU);
2947 e->probability = profile_probability::always ();
2948 }
2949
2950 /* Duplicate the loop tree, if available and wanted. */
2951 if (loops_for_fn (src_cfun) != NULL
2952 && current_loops != NULL)
2953 {
2954 copy_loops (id, entry_block_map->loop_father,
2955 get_loop (src_cfun, 0));
2956 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2957 loops_state_set (LOOPS_NEED_FIXUP);
2958 }
2959
2960 /* If the loop tree in the source function needed fixup, mark the
2961 destination loop tree for fixup, too. */
2962 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2963 loops_state_set (LOOPS_NEED_FIXUP);
2964
2965 if (gimple_in_ssa_p (cfun))
2966 FOR_ALL_BB_FN (bb, cfun_to_copy)
2967 if (!id->blocks_to_copy
2968 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2969 copy_phis_for_bb (bb, id);
2970
2971 FOR_ALL_BB_FN (bb, cfun_to_copy)
2972 if (bb->aux)
2973 {
2974 if (need_debug_cleanup
2975 && bb->index != ENTRY_BLOCK
2976 && bb->index != EXIT_BLOCK)
2977 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2978 /* Update call edge destinations. This cannot be done before loop
2979 info is updated, because we may split basic blocks. */
2980 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2981 && bb->index != ENTRY_BLOCK
2982 && bb->index != EXIT_BLOCK)
2983 redirect_all_calls (id, (basic_block)bb->aux);
2984 ((basic_block)bb->aux)->aux = NULL;
2985 bb->aux = NULL;
2986 }
2987
2988 /* Zero out AUX fields of newly created block during EH edge
2989 insertion. */
2990 for (; last < last_basic_block_for_fn (cfun); last++)
2991 {
2992 if (need_debug_cleanup)
2993 maybe_move_debug_stmts_to_successors (id,
2994 BASIC_BLOCK_FOR_FN (cfun, last));
2995 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2996 /* Update call edge destinations. This cannot be done before loop
2997 info is updated, because we may split basic blocks. */
2998 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2999 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3000 }
3001 entry_block_map->aux = NULL;
3002 exit_block_map->aux = NULL;
3003
3004 if (id->eh_map)
3005 {
3006 delete id->eh_map;
3007 id->eh_map = NULL;
3008 }
3009 if (id->dependence_map)
3010 {
3011 delete id->dependence_map;
3012 id->dependence_map = NULL;
3013 }
3014
3015 return new_fndecl;
3016 }
3017
3018 /* Copy the debug STMT using ID. We deal with these statements in a
3019 special way: if any variable in their VALUE expression wasn't
3020 remapped yet, we won't remap it, because that would get decl uids
3021 out of sync, causing codegen differences between -g and -g0. If
3022 this arises, we drop the VALUE expression altogether. */
3023
3024 static void
3025 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3026 {
3027 tree t, *n;
3028 struct walk_stmt_info wi;
3029
3030 if (tree block = gimple_block (stmt))
3031 {
3032 n = id->decl_map->get (block);
3033 gimple_set_block (stmt, n ? *n : id->block);
3034 }
3035
3036 if (gimple_debug_nonbind_marker_p (stmt))
3037 return;
3038
3039 /* Remap all the operands in COPY. */
3040 memset (&wi, 0, sizeof (wi));
3041 wi.info = id;
3042
3043 processing_debug_stmt = 1;
3044
3045 if (gimple_debug_source_bind_p (stmt))
3046 t = gimple_debug_source_bind_get_var (stmt);
3047 else if (gimple_debug_bind_p (stmt))
3048 t = gimple_debug_bind_get_var (stmt);
3049 else
3050 gcc_unreachable ();
3051
3052 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3053 && (n = id->debug_map->get (t)))
3054 {
3055 gcc_assert (VAR_P (*n));
3056 t = *n;
3057 }
3058 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3059 /* T is a non-localized variable. */;
3060 else
3061 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3062
3063 if (gimple_debug_bind_p (stmt))
3064 {
3065 gimple_debug_bind_set_var (stmt, t);
3066
3067 if (gimple_debug_bind_has_value_p (stmt))
3068 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3069 remap_gimple_op_r, &wi, NULL);
3070
3071 /* Punt if any decl couldn't be remapped. */
3072 if (processing_debug_stmt < 0)
3073 gimple_debug_bind_reset_value (stmt);
3074 }
3075 else if (gimple_debug_source_bind_p (stmt))
3076 {
3077 gimple_debug_source_bind_set_var (stmt, t);
3078 /* When inlining and source bind refers to one of the optimized
3079 away parameters, change the source bind into normal debug bind
3080 referring to the corresponding DEBUG_EXPR_DECL that should have
3081 been bound before the call stmt. */
3082 t = gimple_debug_source_bind_get_value (stmt);
3083 if (t != NULL_TREE
3084 && TREE_CODE (t) == PARM_DECL
3085 && id->call_stmt)
3086 {
3087 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3088 unsigned int i;
3089 if (debug_args != NULL)
3090 {
3091 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3092 if ((**debug_args)[i] == DECL_ORIGIN (t)
3093 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3094 {
3095 t = (**debug_args)[i + 1];
3096 stmt->subcode = GIMPLE_DEBUG_BIND;
3097 gimple_debug_bind_set_value (stmt, t);
3098 break;
3099 }
3100 }
3101 }
3102 if (gimple_debug_source_bind_p (stmt))
3103 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3104 remap_gimple_op_r, &wi, NULL);
3105 }
3106
3107 processing_debug_stmt = 0;
3108
3109 update_stmt (stmt);
3110 }
3111
3112 /* Process deferred debug stmts. In order to give values better odds
3113 of being successfully remapped, we delay the processing of debug
3114 stmts until all other stmts that might require remapping are
3115 processed. */
3116
3117 static void
3118 copy_debug_stmts (copy_body_data *id)
3119 {
3120 size_t i;
3121 gdebug *stmt;
3122
3123 if (!id->debug_stmts.exists ())
3124 return;
3125
3126 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3127 copy_debug_stmt (stmt, id);
3128
3129 id->debug_stmts.release ();
3130 }
3131
3132 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3133 another function. */
3134
3135 static tree
3136 copy_tree_body (copy_body_data *id)
3137 {
3138 tree fndecl = id->src_fn;
3139 tree body = DECL_SAVED_TREE (fndecl);
3140
3141 walk_tree (&body, copy_tree_body_r, id, NULL);
3142
3143 return body;
3144 }
3145
3146 /* Make a copy of the body of FN so that it can be inserted inline in
3147 another function. */
3148
3149 static tree
3150 copy_body (copy_body_data *id,
3151 basic_block entry_block_map, basic_block exit_block_map,
3152 basic_block new_entry)
3153 {
3154 tree fndecl = id->src_fn;
3155 tree body;
3156
3157 /* If this body has a CFG, walk CFG and copy. */
3158 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3159 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3160 new_entry);
3161 copy_debug_stmts (id);
3162
3163 return body;
3164 }
3165
3166 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3167 defined in function FN, or of a data member thereof. */
3168
3169 static bool
3170 self_inlining_addr_expr (tree value, tree fn)
3171 {
3172 tree var;
3173
3174 if (TREE_CODE (value) != ADDR_EXPR)
3175 return false;
3176
3177 var = get_base_address (TREE_OPERAND (value, 0));
3178
3179 return var && auto_var_in_fn_p (var, fn);
3180 }
3181
3182 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3183 lexical block and line number information from base_stmt, if given,
3184 or from the last stmt of the block otherwise. */
3185
3186 static gimple *
3187 insert_init_debug_bind (copy_body_data *id,
3188 basic_block bb, tree var, tree value,
3189 gimple *base_stmt)
3190 {
3191 gimple *note;
3192 gimple_stmt_iterator gsi;
3193 tree tracked_var;
3194
3195 if (!gimple_in_ssa_p (id->src_cfun))
3196 return NULL;
3197
3198 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3199 return NULL;
3200
3201 tracked_var = target_for_debug_bind (var);
3202 if (!tracked_var)
3203 return NULL;
3204
3205 if (bb)
3206 {
3207 gsi = gsi_last_bb (bb);
3208 if (!base_stmt && !gsi_end_p (gsi))
3209 base_stmt = gsi_stmt (gsi);
3210 }
3211
3212 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3213
3214 if (bb)
3215 {
3216 if (!gsi_end_p (gsi))
3217 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3218 else
3219 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3220 }
3221
3222 return note;
3223 }
3224
3225 static void
3226 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3227 {
3228 /* If VAR represents a zero-sized variable, it's possible that the
3229 assignment statement may result in no gimple statements. */
3230 if (init_stmt)
3231 {
3232 gimple_stmt_iterator si = gsi_last_bb (bb);
3233
3234 /* We can end up with init statements that store to a non-register
3235 from a rhs with a conversion. Handle that here by forcing the
3236 rhs into a temporary. gimple_regimplify_operands is not
3237 prepared to do this for us. */
3238 if (!is_gimple_debug (init_stmt)
3239 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3240 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3241 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3242 {
3243 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3244 gimple_expr_type (init_stmt),
3245 gimple_assign_rhs1 (init_stmt));
3246 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3247 GSI_NEW_STMT);
3248 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3249 gimple_assign_set_rhs1 (init_stmt, rhs);
3250 }
3251 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3252 gimple_regimplify_operands (init_stmt, &si);
3253
3254 if (!is_gimple_debug (init_stmt))
3255 {
3256 tree def = gimple_assign_lhs (init_stmt);
3257 insert_init_debug_bind (id, bb, def, def, init_stmt);
3258 }
3259 }
3260 }
3261
3262 /* Initialize parameter P with VALUE. If needed, produce init statement
3263 at the end of BB. When BB is NULL, we return init statement to be
3264 output later. */
3265 static gimple *
3266 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3267 basic_block bb, tree *vars)
3268 {
3269 gimple *init_stmt = NULL;
3270 tree var;
3271 tree rhs = value;
3272 tree def = (gimple_in_ssa_p (cfun)
3273 ? ssa_default_def (id->src_cfun, p) : NULL);
3274
3275 if (value
3276 && value != error_mark_node
3277 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3278 {
3279 /* If we can match up types by promotion/demotion do so. */
3280 if (fold_convertible_p (TREE_TYPE (p), value))
3281 rhs = fold_convert (TREE_TYPE (p), value);
3282 else
3283 {
3284 /* ??? For valid programs we should not end up here.
3285 Still if we end up with truly mismatched types here, fall back
3286 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3287 GIMPLE to the following passes. */
3288 if (!is_gimple_reg_type (TREE_TYPE (value))
3289 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3290 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3291 else
3292 rhs = build_zero_cst (TREE_TYPE (p));
3293 }
3294 }
3295
3296 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3297 here since the type of this decl must be visible to the calling
3298 function. */
3299 var = copy_decl_to_var (p, id);
3300
3301 /* Declare this new variable. */
3302 DECL_CHAIN (var) = *vars;
3303 *vars = var;
3304
3305 /* Make gimplifier happy about this variable. */
3306 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3307
3308 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3309 we would not need to create a new variable here at all, if it
3310 weren't for debug info. Still, we can just use the argument
3311 value. */
3312 if (TREE_READONLY (p)
3313 && !TREE_ADDRESSABLE (p)
3314 && value && !TREE_SIDE_EFFECTS (value)
3315 && !def)
3316 {
3317 /* We may produce non-gimple trees by adding NOPs or introduce
3318 invalid sharing when operand is not really constant.
3319 It is not big deal to prohibit constant propagation here as
3320 we will constant propagate in DOM1 pass anyway. */
3321 if (is_gimple_min_invariant (value)
3322 && useless_type_conversion_p (TREE_TYPE (p),
3323 TREE_TYPE (value))
3324 /* We have to be very careful about ADDR_EXPR. Make sure
3325 the base variable isn't a local variable of the inlined
3326 function, e.g., when doing recursive inlining, direct or
3327 mutually-recursive or whatever, which is why we don't
3328 just test whether fn == current_function_decl. */
3329 && ! self_inlining_addr_expr (value, fn))
3330 {
3331 insert_decl_map (id, p, value);
3332 insert_debug_decl_map (id, p, var);
3333 return insert_init_debug_bind (id, bb, var, value, NULL);
3334 }
3335 }
3336
3337 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3338 that way, when the PARM_DECL is encountered, it will be
3339 automatically replaced by the VAR_DECL. */
3340 insert_decl_map (id, p, var);
3341
3342 /* Even if P was TREE_READONLY, the new VAR should not be.
3343 In the original code, we would have constructed a
3344 temporary, and then the function body would have never
3345 changed the value of P. However, now, we will be
3346 constructing VAR directly. The constructor body may
3347 change its value multiple times as it is being
3348 constructed. Therefore, it must not be TREE_READONLY;
3349 the back-end assumes that TREE_READONLY variable is
3350 assigned to only once. */
3351 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3352 TREE_READONLY (var) = 0;
3353
3354 /* If there is no setup required and we are in SSA, take the easy route
3355 replacing all SSA names representing the function parameter by the
3356 SSA name passed to function.
3357
3358 We need to construct map for the variable anyway as it might be used
3359 in different SSA names when parameter is set in function.
3360
3361 Do replacement at -O0 for const arguments replaced by constant.
3362 This is important for builtin_constant_p and other construct requiring
3363 constant argument to be visible in inlined function body. */
3364 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3365 && (optimize
3366 || (TREE_READONLY (p)
3367 && is_gimple_min_invariant (rhs)))
3368 && (TREE_CODE (rhs) == SSA_NAME
3369 || is_gimple_min_invariant (rhs))
3370 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3371 {
3372 insert_decl_map (id, def, rhs);
3373 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3374 }
3375
3376 /* If the value of argument is never used, don't care about initializing
3377 it. */
3378 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3379 {
3380 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3381 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3382 }
3383
3384 /* Initialize this VAR_DECL from the equivalent argument. Convert
3385 the argument to the proper type in case it was promoted. */
3386 if (value)
3387 {
3388 if (rhs == error_mark_node)
3389 {
3390 insert_decl_map (id, p, var);
3391 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3392 }
3393
3394 STRIP_USELESS_TYPE_CONVERSION (rhs);
3395
3396 /* If we are in SSA form properly remap the default definition
3397 or assign to a dummy SSA name if the parameter is unused and
3398 we are not optimizing. */
3399 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3400 {
3401 if (def)
3402 {
3403 def = remap_ssa_name (def, id);
3404 init_stmt = gimple_build_assign (def, rhs);
3405 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3406 set_ssa_default_def (cfun, var, NULL);
3407 }
3408 else if (!optimize)
3409 {
3410 def = make_ssa_name (var);
3411 init_stmt = gimple_build_assign (def, rhs);
3412 }
3413 }
3414 else
3415 init_stmt = gimple_build_assign (var, rhs);
3416
3417 if (bb && init_stmt)
3418 insert_init_stmt (id, bb, init_stmt);
3419 }
3420 return init_stmt;
3421 }
3422
3423 /* Generate code to initialize the parameters of the function at the
3424 top of the stack in ID from the GIMPLE_CALL STMT. */
3425
3426 static void
3427 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3428 tree fn, basic_block bb)
3429 {
3430 tree parms;
3431 size_t i;
3432 tree p;
3433 tree vars = NULL_TREE;
3434 tree static_chain = gimple_call_chain (stmt);
3435
3436 /* Figure out what the parameters are. */
3437 parms = DECL_ARGUMENTS (fn);
3438
3439 /* Loop through the parameter declarations, replacing each with an
3440 equivalent VAR_DECL, appropriately initialized. */
3441 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3442 {
3443 tree val;
3444 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3445 setup_one_parameter (id, p, val, fn, bb, &vars);
3446 }
3447 /* After remapping parameters remap their types. This has to be done
3448 in a second loop over all parameters to appropriately remap
3449 variable sized arrays when the size is specified in a
3450 parameter following the array. */
3451 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3452 {
3453 tree *varp = id->decl_map->get (p);
3454 if (varp && VAR_P (*varp))
3455 {
3456 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3457 ? ssa_default_def (id->src_cfun, p) : NULL);
3458 tree var = *varp;
3459 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3460 /* Also remap the default definition if it was remapped
3461 to the default definition of the parameter replacement
3462 by the parameter setup. */
3463 if (def)
3464 {
3465 tree *defp = id->decl_map->get (def);
3466 if (defp
3467 && TREE_CODE (*defp) == SSA_NAME
3468 && SSA_NAME_VAR (*defp) == var)
3469 TREE_TYPE (*defp) = TREE_TYPE (var);
3470 }
3471 }
3472 }
3473
3474 /* Initialize the static chain. */
3475 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3476 gcc_assert (fn != current_function_decl);
3477 if (p)
3478 {
3479 /* No static chain? Seems like a bug in tree-nested.c. */
3480 gcc_assert (static_chain);
3481
3482 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3483 }
3484
3485 declare_inline_vars (id->block, vars);
3486 }
3487
3488
3489 /* Declare a return variable to replace the RESULT_DECL for the
3490 function we are calling. An appropriate DECL_STMT is returned.
3491 The USE_STMT is filled to contain a use of the declaration to
3492 indicate the return value of the function.
3493
3494 RETURN_SLOT, if non-null is place where to store the result. It
3495 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3496 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3497
3498 The return value is a (possibly null) value that holds the result
3499 as seen by the caller. */
3500
3501 static tree
3502 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3503 basic_block entry_bb)
3504 {
3505 tree callee = id->src_fn;
3506 tree result = DECL_RESULT (callee);
3507 tree callee_type = TREE_TYPE (result);
3508 tree caller_type;
3509 tree var, use;
3510
3511 /* Handle type-mismatches in the function declaration return type
3512 vs. the call expression. */
3513 if (modify_dest)
3514 caller_type = TREE_TYPE (modify_dest);
3515 else
3516 caller_type = TREE_TYPE (TREE_TYPE (callee));
3517
3518 /* We don't need to do anything for functions that don't return anything. */
3519 if (VOID_TYPE_P (callee_type))
3520 return NULL_TREE;
3521
3522 /* If there was a return slot, then the return value is the
3523 dereferenced address of that object. */
3524 if (return_slot)
3525 {
3526 /* The front end shouldn't have used both return_slot and
3527 a modify expression. */
3528 gcc_assert (!modify_dest);
3529 if (DECL_BY_REFERENCE (result))
3530 {
3531 tree return_slot_addr = build_fold_addr_expr (return_slot);
3532 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3533
3534 /* We are going to construct *&return_slot and we can't do that
3535 for variables believed to be not addressable.
3536
3537 FIXME: This check possibly can match, because values returned
3538 via return slot optimization are not believed to have address
3539 taken by alias analysis. */
3540 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3541 var = return_slot_addr;
3542 }
3543 else
3544 {
3545 var = return_slot;
3546 gcc_assert (TREE_CODE (var) != SSA_NAME);
3547 if (TREE_ADDRESSABLE (result))
3548 mark_addressable (var);
3549 }
3550 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3551 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3552 && !DECL_GIMPLE_REG_P (result)
3553 && DECL_P (var))
3554 DECL_GIMPLE_REG_P (var) = 0;
3555 use = NULL;
3556 goto done;
3557 }
3558
3559 /* All types requiring non-trivial constructors should have been handled. */
3560 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3561
3562 /* Attempt to avoid creating a new temporary variable. */
3563 if (modify_dest
3564 && TREE_CODE (modify_dest) != SSA_NAME)
3565 {
3566 bool use_it = false;
3567
3568 /* We can't use MODIFY_DEST if there's type promotion involved. */
3569 if (!useless_type_conversion_p (callee_type, caller_type))
3570 use_it = false;
3571
3572 /* ??? If we're assigning to a variable sized type, then we must
3573 reuse the destination variable, because we've no good way to
3574 create variable sized temporaries at this point. */
3575 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3576 use_it = true;
3577
3578 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3579 reuse it as the result of the call directly. Don't do this if
3580 it would promote MODIFY_DEST to addressable. */
3581 else if (TREE_ADDRESSABLE (result))
3582 use_it = false;
3583 else
3584 {
3585 tree base_m = get_base_address (modify_dest);
3586
3587 /* If the base isn't a decl, then it's a pointer, and we don't
3588 know where that's going to go. */
3589 if (!DECL_P (base_m))
3590 use_it = false;
3591 else if (is_global_var (base_m))
3592 use_it = false;
3593 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3594 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3595 && !DECL_GIMPLE_REG_P (result)
3596 && DECL_GIMPLE_REG_P (base_m))
3597 use_it = false;
3598 else if (!TREE_ADDRESSABLE (base_m))
3599 use_it = true;
3600 }
3601
3602 if (use_it)
3603 {
3604 var = modify_dest;
3605 use = NULL;
3606 goto done;
3607 }
3608 }
3609
3610 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3611
3612 var = copy_result_decl_to_var (result, id);
3613 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3614
3615 /* Do not have the rest of GCC warn about this variable as it should
3616 not be visible to the user. */
3617 TREE_NO_WARNING (var) = 1;
3618
3619 declare_inline_vars (id->block, var);
3620
3621 /* Build the use expr. If the return type of the function was
3622 promoted, convert it back to the expected type. */
3623 use = var;
3624 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3625 {
3626 /* If we can match up types by promotion/demotion do so. */
3627 if (fold_convertible_p (caller_type, var))
3628 use = fold_convert (caller_type, var);
3629 else
3630 {
3631 /* ??? For valid programs we should not end up here.
3632 Still if we end up with truly mismatched types here, fall back
3633 to using a MEM_REF to not leak invalid GIMPLE to the following
3634 passes. */
3635 /* Prevent var from being written into SSA form. */
3636 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3637 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3638 DECL_GIMPLE_REG_P (var) = false;
3639 else if (is_gimple_reg_type (TREE_TYPE (var)))
3640 TREE_ADDRESSABLE (var) = true;
3641 use = fold_build2 (MEM_REF, caller_type,
3642 build_fold_addr_expr (var),
3643 build_int_cst (ptr_type_node, 0));
3644 }
3645 }
3646
3647 STRIP_USELESS_TYPE_CONVERSION (use);
3648
3649 if (DECL_BY_REFERENCE (result))
3650 {
3651 TREE_ADDRESSABLE (var) = 1;
3652 var = build_fold_addr_expr (var);
3653 }
3654
3655 done:
3656 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3657 way, when the RESULT_DECL is encountered, it will be
3658 automatically replaced by the VAR_DECL.
3659
3660 When returning by reference, ensure that RESULT_DECL remaps to
3661 gimple_val. */
3662 if (DECL_BY_REFERENCE (result)
3663 && !is_gimple_val (var))
3664 {
3665 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3666 insert_decl_map (id, result, temp);
3667 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3668 it's default_def SSA_NAME. */
3669 if (gimple_in_ssa_p (id->src_cfun)
3670 && is_gimple_reg (result))
3671 {
3672 temp = make_ssa_name (temp);
3673 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3674 }
3675 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3676 }
3677 else
3678 insert_decl_map (id, result, var);
3679
3680 /* Remember this so we can ignore it in remap_decls. */
3681 id->retvar = var;
3682 return use;
3683 }
3684
3685 /* Determine if the function can be copied. If so return NULL. If
3686 not return a string describng the reason for failure. */
3687
3688 const char *
3689 copy_forbidden (struct function *fun)
3690 {
3691 const char *reason = fun->cannot_be_copied_reason;
3692
3693 /* Only examine the function once. */
3694 if (fun->cannot_be_copied_set)
3695 return reason;
3696
3697 /* We cannot copy a function that receives a non-local goto
3698 because we cannot remap the destination label used in the
3699 function that is performing the non-local goto. */
3700 /* ??? Actually, this should be possible, if we work at it.
3701 No doubt there's just a handful of places that simply
3702 assume it doesn't happen and don't substitute properly. */
3703 if (fun->has_nonlocal_label)
3704 {
3705 reason = G_("function %q+F can never be copied "
3706 "because it receives a non-local goto");
3707 goto fail;
3708 }
3709
3710 if (fun->has_forced_label_in_static)
3711 {
3712 reason = G_("function %q+F can never be copied because it saves "
3713 "address of local label in a static variable");
3714 goto fail;
3715 }
3716
3717 fail:
3718 fun->cannot_be_copied_reason = reason;
3719 fun->cannot_be_copied_set = true;
3720 return reason;
3721 }
3722
3723
3724 static const char *inline_forbidden_reason;
3725
3726 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3727 iff a function cannot be inlined. Also sets the reason why. */
3728
3729 static tree
3730 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3731 struct walk_stmt_info *wip)
3732 {
3733 tree fn = (tree) wip->info;
3734 tree t;
3735 gimple *stmt = gsi_stmt (*gsi);
3736
3737 switch (gimple_code (stmt))
3738 {
3739 case GIMPLE_CALL:
3740 /* Refuse to inline alloca call unless user explicitly forced so as
3741 this may change program's memory overhead drastically when the
3742 function using alloca is called in loop. In GCC present in
3743 SPEC2000 inlining into schedule_block cause it to require 2GB of
3744 RAM instead of 256MB. Don't do so for alloca calls emitted for
3745 VLA objects as those can't cause unbounded growth (they're always
3746 wrapped inside stack_save/stack_restore regions. */
3747 if (gimple_maybe_alloca_call_p (stmt)
3748 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3749 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3750 {
3751 inline_forbidden_reason
3752 = G_("function %q+F can never be inlined because it uses "
3753 "alloca (override using the always_inline attribute)");
3754 *handled_ops_p = true;
3755 return fn;
3756 }
3757
3758 t = gimple_call_fndecl (stmt);
3759 if (t == NULL_TREE)
3760 break;
3761
3762 /* We cannot inline functions that call setjmp. */
3763 if (setjmp_call_p (t))
3764 {
3765 inline_forbidden_reason
3766 = G_("function %q+F can never be inlined because it uses setjmp");
3767 *handled_ops_p = true;
3768 return t;
3769 }
3770
3771 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3772 switch (DECL_FUNCTION_CODE (t))
3773 {
3774 /* We cannot inline functions that take a variable number of
3775 arguments. */
3776 case BUILT_IN_VA_START:
3777 case BUILT_IN_NEXT_ARG:
3778 case BUILT_IN_VA_END:
3779 inline_forbidden_reason
3780 = G_("function %q+F can never be inlined because it "
3781 "uses variable argument lists");
3782 *handled_ops_p = true;
3783 return t;
3784
3785 case BUILT_IN_LONGJMP:
3786 /* We can't inline functions that call __builtin_longjmp at
3787 all. The non-local goto machinery really requires the
3788 destination be in a different function. If we allow the
3789 function calling __builtin_longjmp to be inlined into the
3790 function calling __builtin_setjmp, Things will Go Awry. */
3791 inline_forbidden_reason
3792 = G_("function %q+F can never be inlined because "
3793 "it uses setjmp-longjmp exception handling");
3794 *handled_ops_p = true;
3795 return t;
3796
3797 case BUILT_IN_NONLOCAL_GOTO:
3798 /* Similarly. */
3799 inline_forbidden_reason
3800 = G_("function %q+F can never be inlined because "
3801 "it uses non-local goto");
3802 *handled_ops_p = true;
3803 return t;
3804
3805 case BUILT_IN_RETURN:
3806 case BUILT_IN_APPLY_ARGS:
3807 /* If a __builtin_apply_args caller would be inlined,
3808 it would be saving arguments of the function it has
3809 been inlined into. Similarly __builtin_return would
3810 return from the function the inline has been inlined into. */
3811 inline_forbidden_reason
3812 = G_("function %q+F can never be inlined because "
3813 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3814 *handled_ops_p = true;
3815 return t;
3816
3817 default:
3818 break;
3819 }
3820 break;
3821
3822 case GIMPLE_GOTO:
3823 t = gimple_goto_dest (stmt);
3824
3825 /* We will not inline a function which uses computed goto. The
3826 addresses of its local labels, which may be tucked into
3827 global storage, are of course not constant across
3828 instantiations, which causes unexpected behavior. */
3829 if (TREE_CODE (t) != LABEL_DECL)
3830 {
3831 inline_forbidden_reason
3832 = G_("function %q+F can never be inlined "
3833 "because it contains a computed goto");
3834 *handled_ops_p = true;
3835 return t;
3836 }
3837 break;
3838
3839 default:
3840 break;
3841 }
3842
3843 *handled_ops_p = false;
3844 return NULL_TREE;
3845 }
3846
3847 /* Return true if FNDECL is a function that cannot be inlined into
3848 another one. */
3849
3850 static bool
3851 inline_forbidden_p (tree fndecl)
3852 {
3853 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3854 struct walk_stmt_info wi;
3855 basic_block bb;
3856 bool forbidden_p = false;
3857
3858 /* First check for shared reasons not to copy the code. */
3859 inline_forbidden_reason = copy_forbidden (fun);
3860 if (inline_forbidden_reason != NULL)
3861 return true;
3862
3863 /* Next, walk the statements of the function looking for
3864 constraucts we can't handle, or are non-optimal for inlining. */
3865 hash_set<tree> visited_nodes;
3866 memset (&wi, 0, sizeof (wi));
3867 wi.info = (void *) fndecl;
3868 wi.pset = &visited_nodes;
3869
3870 FOR_EACH_BB_FN (bb, fun)
3871 {
3872 gimple *ret;
3873 gimple_seq seq = bb_seq (bb);
3874 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3875 forbidden_p = (ret != NULL);
3876 if (forbidden_p)
3877 break;
3878 }
3879
3880 return forbidden_p;
3881 }
3882 \f
3883 /* Return false if the function FNDECL cannot be inlined on account of its
3884 attributes, true otherwise. */
3885 static bool
3886 function_attribute_inlinable_p (const_tree fndecl)
3887 {
3888 if (targetm.attribute_table)
3889 {
3890 const_tree a;
3891
3892 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3893 {
3894 const_tree name = TREE_PURPOSE (a);
3895 int i;
3896
3897 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3898 if (is_attribute_p (targetm.attribute_table[i].name, name))
3899 return targetm.function_attribute_inlinable_p (fndecl);
3900 }
3901 }
3902
3903 return true;
3904 }
3905
3906 /* Returns nonzero if FN is a function that does not have any
3907 fundamental inline blocking properties. */
3908
3909 bool
3910 tree_inlinable_function_p (tree fn)
3911 {
3912 bool inlinable = true;
3913 bool do_warning;
3914 tree always_inline;
3915
3916 /* If we've already decided this function shouldn't be inlined,
3917 there's no need to check again. */
3918 if (DECL_UNINLINABLE (fn))
3919 return false;
3920
3921 /* We only warn for functions declared `inline' by the user. */
3922 do_warning = (warn_inline
3923 && DECL_DECLARED_INLINE_P (fn)
3924 && !DECL_NO_INLINE_WARNING_P (fn)
3925 && !DECL_IN_SYSTEM_HEADER (fn));
3926
3927 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3928
3929 if (flag_no_inline
3930 && always_inline == NULL)
3931 {
3932 if (do_warning)
3933 warning (OPT_Winline, "function %q+F can never be inlined because it "
3934 "is suppressed using %<-fno-inline%>", fn);
3935 inlinable = false;
3936 }
3937
3938 else if (!function_attribute_inlinable_p (fn))
3939 {
3940 if (do_warning)
3941 warning (OPT_Winline, "function %q+F can never be inlined because it "
3942 "uses attributes conflicting with inlining", fn);
3943 inlinable = false;
3944 }
3945
3946 else if (inline_forbidden_p (fn))
3947 {
3948 /* See if we should warn about uninlinable functions. Previously,
3949 some of these warnings would be issued while trying to expand
3950 the function inline, but that would cause multiple warnings
3951 about functions that would for example call alloca. But since
3952 this a property of the function, just one warning is enough.
3953 As a bonus we can now give more details about the reason why a
3954 function is not inlinable. */
3955 if (always_inline)
3956 error (inline_forbidden_reason, fn);
3957 else if (do_warning)
3958 warning (OPT_Winline, inline_forbidden_reason, fn);
3959
3960 inlinable = false;
3961 }
3962
3963 /* Squirrel away the result so that we don't have to check again. */
3964 DECL_UNINLINABLE (fn) = !inlinable;
3965
3966 return inlinable;
3967 }
3968
3969 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
3970 word size and take possible memcpy call into account and return
3971 cost based on whether optimizing for size or speed according to SPEED_P. */
3972
3973 int
3974 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3975 {
3976 HOST_WIDE_INT size;
3977
3978 gcc_assert (!VOID_TYPE_P (type));
3979
3980 if (TREE_CODE (type) == VECTOR_TYPE)
3981 {
3982 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3983 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3984 int orig_mode_size
3985 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3986 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3987 return ((orig_mode_size + simd_mode_size - 1)
3988 / simd_mode_size);
3989 }
3990
3991 size = int_size_in_bytes (type);
3992
3993 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3994 /* Cost of a memcpy call, 3 arguments and the call. */
3995 return 4;
3996 else
3997 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3998 }
3999
4000 /* Returns cost of operation CODE, according to WEIGHTS */
4001
4002 static int
4003 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4004 tree op1 ATTRIBUTE_UNUSED, tree op2)
4005 {
4006 switch (code)
4007 {
4008 /* These are "free" conversions, or their presumed cost
4009 is folded into other operations. */
4010 case RANGE_EXPR:
4011 CASE_CONVERT:
4012 case COMPLEX_EXPR:
4013 case PAREN_EXPR:
4014 case VIEW_CONVERT_EXPR:
4015 return 0;
4016
4017 /* Assign cost of 1 to usual operations.
4018 ??? We may consider mapping RTL costs to this. */
4019 case COND_EXPR:
4020 case VEC_COND_EXPR:
4021 case VEC_PERM_EXPR:
4022
4023 case PLUS_EXPR:
4024 case POINTER_PLUS_EXPR:
4025 case POINTER_DIFF_EXPR:
4026 case MINUS_EXPR:
4027 case MULT_EXPR:
4028 case MULT_HIGHPART_EXPR:
4029
4030 case ADDR_SPACE_CONVERT_EXPR:
4031 case FIXED_CONVERT_EXPR:
4032 case FIX_TRUNC_EXPR:
4033
4034 case NEGATE_EXPR:
4035 case FLOAT_EXPR:
4036 case MIN_EXPR:
4037 case MAX_EXPR:
4038 case ABS_EXPR:
4039 case ABSU_EXPR:
4040
4041 case LSHIFT_EXPR:
4042 case RSHIFT_EXPR:
4043 case LROTATE_EXPR:
4044 case RROTATE_EXPR:
4045
4046 case BIT_IOR_EXPR:
4047 case BIT_XOR_EXPR:
4048 case BIT_AND_EXPR:
4049 case BIT_NOT_EXPR:
4050
4051 case TRUTH_ANDIF_EXPR:
4052 case TRUTH_ORIF_EXPR:
4053 case TRUTH_AND_EXPR:
4054 case TRUTH_OR_EXPR:
4055 case TRUTH_XOR_EXPR:
4056 case TRUTH_NOT_EXPR:
4057
4058 case LT_EXPR:
4059 case LE_EXPR:
4060 case GT_EXPR:
4061 case GE_EXPR:
4062 case EQ_EXPR:
4063 case NE_EXPR:
4064 case ORDERED_EXPR:
4065 case UNORDERED_EXPR:
4066
4067 case UNLT_EXPR:
4068 case UNLE_EXPR:
4069 case UNGT_EXPR:
4070 case UNGE_EXPR:
4071 case UNEQ_EXPR:
4072 case LTGT_EXPR:
4073
4074 case CONJ_EXPR:
4075
4076 case PREDECREMENT_EXPR:
4077 case PREINCREMENT_EXPR:
4078 case POSTDECREMENT_EXPR:
4079 case POSTINCREMENT_EXPR:
4080
4081 case REALIGN_LOAD_EXPR:
4082
4083 case WIDEN_SUM_EXPR:
4084 case WIDEN_MULT_EXPR:
4085 case DOT_PROD_EXPR:
4086 case SAD_EXPR:
4087 case WIDEN_MULT_PLUS_EXPR:
4088 case WIDEN_MULT_MINUS_EXPR:
4089 case WIDEN_LSHIFT_EXPR:
4090
4091 case VEC_WIDEN_MULT_HI_EXPR:
4092 case VEC_WIDEN_MULT_LO_EXPR:
4093 case VEC_WIDEN_MULT_EVEN_EXPR:
4094 case VEC_WIDEN_MULT_ODD_EXPR:
4095 case VEC_UNPACK_HI_EXPR:
4096 case VEC_UNPACK_LO_EXPR:
4097 case VEC_UNPACK_FLOAT_HI_EXPR:
4098 case VEC_UNPACK_FLOAT_LO_EXPR:
4099 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4100 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4101 case VEC_PACK_TRUNC_EXPR:
4102 case VEC_PACK_SAT_EXPR:
4103 case VEC_PACK_FIX_TRUNC_EXPR:
4104 case VEC_PACK_FLOAT_EXPR:
4105 case VEC_WIDEN_LSHIFT_HI_EXPR:
4106 case VEC_WIDEN_LSHIFT_LO_EXPR:
4107 case VEC_DUPLICATE_EXPR:
4108 case VEC_SERIES_EXPR:
4109
4110 return 1;
4111
4112 /* Few special cases of expensive operations. This is useful
4113 to avoid inlining on functions having too many of these. */
4114 case TRUNC_DIV_EXPR:
4115 case CEIL_DIV_EXPR:
4116 case FLOOR_DIV_EXPR:
4117 case ROUND_DIV_EXPR:
4118 case EXACT_DIV_EXPR:
4119 case TRUNC_MOD_EXPR:
4120 case CEIL_MOD_EXPR:
4121 case FLOOR_MOD_EXPR:
4122 case ROUND_MOD_EXPR:
4123 case RDIV_EXPR:
4124 if (TREE_CODE (op2) != INTEGER_CST)
4125 return weights->div_mod_cost;
4126 return 1;
4127
4128 /* Bit-field insertion needs several shift and mask operations. */
4129 case BIT_INSERT_EXPR:
4130 return 3;
4131
4132 default:
4133 /* We expect a copy assignment with no operator. */
4134 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4135 return 0;
4136 }
4137 }
4138
4139
4140 /* Estimate number of instructions that will be created by expanding
4141 the statements in the statement sequence STMTS.
4142 WEIGHTS contains weights attributed to various constructs. */
4143
4144 int
4145 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4146 {
4147 int cost;
4148 gimple_stmt_iterator gsi;
4149
4150 cost = 0;
4151 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4152 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4153
4154 return cost;
4155 }
4156
4157
4158 /* Estimate number of instructions that will be created by expanding STMT.
4159 WEIGHTS contains weights attributed to various constructs. */
4160
4161 int
4162 estimate_num_insns (gimple *stmt, eni_weights *weights)
4163 {
4164 unsigned cost, i;
4165 enum gimple_code code = gimple_code (stmt);
4166 tree lhs;
4167 tree rhs;
4168
4169 switch (code)
4170 {
4171 case GIMPLE_ASSIGN:
4172 /* Try to estimate the cost of assignments. We have three cases to
4173 deal with:
4174 1) Simple assignments to registers;
4175 2) Stores to things that must live in memory. This includes
4176 "normal" stores to scalars, but also assignments of large
4177 structures, or constructors of big arrays;
4178
4179 Let us look at the first two cases, assuming we have "a = b + C":
4180 <GIMPLE_ASSIGN <var_decl "a">
4181 <plus_expr <var_decl "b"> <constant C>>
4182 If "a" is a GIMPLE register, the assignment to it is free on almost
4183 any target, because "a" usually ends up in a real register. Hence
4184 the only cost of this expression comes from the PLUS_EXPR, and we
4185 can ignore the GIMPLE_ASSIGN.
4186 If "a" is not a GIMPLE register, the assignment to "a" will most
4187 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4188 of moving something into "a", which we compute using the function
4189 estimate_move_cost. */
4190 if (gimple_clobber_p (stmt))
4191 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4192
4193 lhs = gimple_assign_lhs (stmt);
4194 rhs = gimple_assign_rhs1 (stmt);
4195
4196 cost = 0;
4197
4198 /* Account for the cost of moving to / from memory. */
4199 if (gimple_store_p (stmt))
4200 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4201 if (gimple_assign_load_p (stmt))
4202 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4203
4204 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4205 gimple_assign_rhs1 (stmt),
4206 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4207 == GIMPLE_BINARY_RHS
4208 ? gimple_assign_rhs2 (stmt) : NULL);
4209 break;
4210
4211 case GIMPLE_COND:
4212 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4213 gimple_op (stmt, 0),
4214 gimple_op (stmt, 1));
4215 break;
4216
4217 case GIMPLE_SWITCH:
4218 {
4219 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4220 /* Take into account cost of the switch + guess 2 conditional jumps for
4221 each case label.
4222
4223 TODO: once the switch expansion logic is sufficiently separated, we can
4224 do better job on estimating cost of the switch. */
4225 if (weights->time_based)
4226 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4227 else
4228 cost = gimple_switch_num_labels (switch_stmt) * 2;
4229 }
4230 break;
4231
4232 case GIMPLE_CALL:
4233 {
4234 tree decl;
4235
4236 if (gimple_call_internal_p (stmt))
4237 return 0;
4238 else if ((decl = gimple_call_fndecl (stmt))
4239 && fndecl_built_in_p (decl))
4240 {
4241 /* Do not special case builtins where we see the body.
4242 This just confuse inliner. */
4243 struct cgraph_node *node;
4244 if (!(node = cgraph_node::get (decl))
4245 || node->definition)
4246 ;
4247 /* For buitins that are likely expanded to nothing or
4248 inlined do not account operand costs. */
4249 else if (is_simple_builtin (decl))
4250 return 0;
4251 else if (is_inexpensive_builtin (decl))
4252 return weights->target_builtin_call_cost;
4253 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4254 {
4255 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4256 specialize the cheap expansion we do here.
4257 ??? This asks for a more general solution. */
4258 switch (DECL_FUNCTION_CODE (decl))
4259 {
4260 case BUILT_IN_POW:
4261 case BUILT_IN_POWF:
4262 case BUILT_IN_POWL:
4263 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4264 && (real_equal
4265 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4266 &dconst2)))
4267 return estimate_operator_cost
4268 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4269 gimple_call_arg (stmt, 0));
4270 break;
4271
4272 default:
4273 break;
4274 }
4275 }
4276 }
4277
4278 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4279 if (gimple_call_lhs (stmt))
4280 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4281 weights->time_based);
4282 for (i = 0; i < gimple_call_num_args (stmt); i++)
4283 {
4284 tree arg = gimple_call_arg (stmt, i);
4285 cost += estimate_move_cost (TREE_TYPE (arg),
4286 weights->time_based);
4287 }
4288 break;
4289 }
4290
4291 case GIMPLE_RETURN:
4292 return weights->return_cost;
4293
4294 case GIMPLE_GOTO:
4295 case GIMPLE_LABEL:
4296 case GIMPLE_NOP:
4297 case GIMPLE_PHI:
4298 case GIMPLE_PREDICT:
4299 case GIMPLE_DEBUG:
4300 return 0;
4301
4302 case GIMPLE_ASM:
4303 {
4304 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4305 /* 1000 means infinity. This avoids overflows later
4306 with very long asm statements. */
4307 if (count > 1000)
4308 count = 1000;
4309 /* If this asm is asm inline, count anything as minimum size. */
4310 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4311 count = MIN (1, count);
4312 return MAX (1, count);
4313 }
4314
4315 case GIMPLE_RESX:
4316 /* This is either going to be an external function call with one
4317 argument, or two register copy statements plus a goto. */
4318 return 2;
4319
4320 case GIMPLE_EH_DISPATCH:
4321 /* ??? This is going to turn into a switch statement. Ideally
4322 we'd have a look at the eh region and estimate the number of
4323 edges involved. */
4324 return 10;
4325
4326 case GIMPLE_BIND:
4327 return estimate_num_insns_seq (
4328 gimple_bind_body (as_a <gbind *> (stmt)),
4329 weights);
4330
4331 case GIMPLE_EH_FILTER:
4332 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4333
4334 case GIMPLE_CATCH:
4335 return estimate_num_insns_seq (gimple_catch_handler (
4336 as_a <gcatch *> (stmt)),
4337 weights);
4338
4339 case GIMPLE_TRY:
4340 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4341 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4342
4343 /* OMP directives are generally very expensive. */
4344
4345 case GIMPLE_OMP_RETURN:
4346 case GIMPLE_OMP_SECTIONS_SWITCH:
4347 case GIMPLE_OMP_ATOMIC_STORE:
4348 case GIMPLE_OMP_CONTINUE:
4349 /* ...except these, which are cheap. */
4350 return 0;
4351
4352 case GIMPLE_OMP_ATOMIC_LOAD:
4353 return weights->omp_cost;
4354
4355 case GIMPLE_OMP_FOR:
4356 return (weights->omp_cost
4357 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4358 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4359
4360 case GIMPLE_OMP_PARALLEL:
4361 case GIMPLE_OMP_TASK:
4362 case GIMPLE_OMP_CRITICAL:
4363 case GIMPLE_OMP_MASTER:
4364 case GIMPLE_OMP_TASKGROUP:
4365 case GIMPLE_OMP_ORDERED:
4366 case GIMPLE_OMP_SECTION:
4367 case GIMPLE_OMP_SECTIONS:
4368 case GIMPLE_OMP_SINGLE:
4369 case GIMPLE_OMP_TARGET:
4370 case GIMPLE_OMP_TEAMS:
4371 return (weights->omp_cost
4372 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4373
4374 case GIMPLE_TRANSACTION:
4375 return (weights->tm_cost
4376 + estimate_num_insns_seq (gimple_transaction_body (
4377 as_a <gtransaction *> (stmt)),
4378 weights));
4379
4380 default:
4381 gcc_unreachable ();
4382 }
4383
4384 return cost;
4385 }
4386
4387 /* Estimate number of instructions that will be created by expanding
4388 function FNDECL. WEIGHTS contains weights attributed to various
4389 constructs. */
4390
4391 int
4392 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4393 {
4394 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4395 gimple_stmt_iterator bsi;
4396 basic_block bb;
4397 int n = 0;
4398
4399 gcc_assert (my_function && my_function->cfg);
4400 FOR_EACH_BB_FN (bb, my_function)
4401 {
4402 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4403 n += estimate_num_insns (gsi_stmt (bsi), weights);
4404 }
4405
4406 return n;
4407 }
4408
4409
4410 /* Initializes weights used by estimate_num_insns. */
4411
4412 void
4413 init_inline_once (void)
4414 {
4415 eni_size_weights.call_cost = 1;
4416 eni_size_weights.indirect_call_cost = 3;
4417 eni_size_weights.target_builtin_call_cost = 1;
4418 eni_size_weights.div_mod_cost = 1;
4419 eni_size_weights.omp_cost = 40;
4420 eni_size_weights.tm_cost = 10;
4421 eni_size_weights.time_based = false;
4422 eni_size_weights.return_cost = 1;
4423
4424 /* Estimating time for call is difficult, since we have no idea what the
4425 called function does. In the current uses of eni_time_weights,
4426 underestimating the cost does less harm than overestimating it, so
4427 we choose a rather small value here. */
4428 eni_time_weights.call_cost = 10;
4429 eni_time_weights.indirect_call_cost = 15;
4430 eni_time_weights.target_builtin_call_cost = 1;
4431 eni_time_weights.div_mod_cost = 10;
4432 eni_time_weights.omp_cost = 40;
4433 eni_time_weights.tm_cost = 40;
4434 eni_time_weights.time_based = true;
4435 eni_time_weights.return_cost = 2;
4436 }
4437
4438
4439 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4440
4441 static void
4442 prepend_lexical_block (tree current_block, tree new_block)
4443 {
4444 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4445 BLOCK_SUBBLOCKS (current_block) = new_block;
4446 BLOCK_SUPERCONTEXT (new_block) = current_block;
4447 }
4448
4449 /* Add local variables from CALLEE to CALLER. */
4450
4451 static inline void
4452 add_local_variables (struct function *callee, struct function *caller,
4453 copy_body_data *id)
4454 {
4455 tree var;
4456 unsigned ix;
4457
4458 FOR_EACH_LOCAL_DECL (callee, ix, var)
4459 if (!can_be_nonlocal (var, id))
4460 {
4461 tree new_var = remap_decl (var, id);
4462
4463 /* Remap debug-expressions. */
4464 if (VAR_P (new_var)
4465 && DECL_HAS_DEBUG_EXPR_P (var)
4466 && new_var != var)
4467 {
4468 tree tem = DECL_DEBUG_EXPR (var);
4469 bool old_regimplify = id->regimplify;
4470 id->remapping_type_depth++;
4471 walk_tree (&tem, copy_tree_body_r, id, NULL);
4472 id->remapping_type_depth--;
4473 id->regimplify = old_regimplify;
4474 SET_DECL_DEBUG_EXPR (new_var, tem);
4475 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4476 }
4477 add_local_decl (caller, new_var);
4478 }
4479 }
4480
4481 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4482 have brought in or introduced any debug stmts for SRCVAR. */
4483
4484 static inline void
4485 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4486 {
4487 tree *remappedvarp = id->decl_map->get (srcvar);
4488
4489 if (!remappedvarp)
4490 return;
4491
4492 if (!VAR_P (*remappedvarp))
4493 return;
4494
4495 if (*remappedvarp == id->retvar)
4496 return;
4497
4498 tree tvar = target_for_debug_bind (*remappedvarp);
4499 if (!tvar)
4500 return;
4501
4502 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4503 id->call_stmt);
4504 gimple_seq_add_stmt (bindings, stmt);
4505 }
4506
4507 /* For each inlined variable for which we may have debug bind stmts,
4508 add before GSI a final debug stmt resetting it, marking the end of
4509 its life, so that var-tracking knows it doesn't have to compute
4510 further locations for it. */
4511
4512 static inline void
4513 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4514 {
4515 tree var;
4516 unsigned ix;
4517 gimple_seq bindings = NULL;
4518
4519 if (!gimple_in_ssa_p (id->src_cfun))
4520 return;
4521
4522 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4523 return;
4524
4525 for (var = DECL_ARGUMENTS (id->src_fn);
4526 var; var = DECL_CHAIN (var))
4527 reset_debug_binding (id, var, &bindings);
4528
4529 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4530 reset_debug_binding (id, var, &bindings);
4531
4532 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4533 }
4534
4535 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4536
4537 static bool
4538 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4539 {
4540 tree use_retvar;
4541 tree fn;
4542 hash_map<tree, tree> *dst;
4543 hash_map<tree, tree> *st = NULL;
4544 tree return_slot;
4545 tree modify_dest;
4546 struct cgraph_edge *cg_edge;
4547 cgraph_inline_failed_t reason;
4548 basic_block return_block;
4549 edge e;
4550 gimple_stmt_iterator gsi, stmt_gsi;
4551 bool successfully_inlined = false;
4552 bool purge_dead_abnormal_edges;
4553 gcall *call_stmt;
4554 unsigned int prop_mask, src_properties;
4555 struct function *dst_cfun;
4556 tree simduid;
4557 use_operand_p use;
4558 gimple *simtenter_stmt = NULL;
4559 vec<tree> *simtvars_save;
4560
4561 /* The gimplifier uses input_location in too many places, such as
4562 internal_get_tmp_var (). */
4563 location_t saved_location = input_location;
4564 input_location = gimple_location (stmt);
4565
4566 /* From here on, we're only interested in CALL_EXPRs. */
4567 call_stmt = dyn_cast <gcall *> (stmt);
4568 if (!call_stmt)
4569 goto egress;
4570
4571 cg_edge = id->dst_node->get_edge (stmt);
4572 gcc_checking_assert (cg_edge);
4573 /* First, see if we can figure out what function is being called.
4574 If we cannot, then there is no hope of inlining the function. */
4575 if (cg_edge->indirect_unknown_callee)
4576 goto egress;
4577 fn = cg_edge->callee->decl;
4578 gcc_checking_assert (fn);
4579
4580 /* If FN is a declaration of a function in a nested scope that was
4581 globally declared inline, we don't set its DECL_INITIAL.
4582 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4583 C++ front-end uses it for cdtors to refer to their internal
4584 declarations, that are not real functions. Fortunately those
4585 don't have trees to be saved, so we can tell by checking their
4586 gimple_body. */
4587 if (!DECL_INITIAL (fn)
4588 && DECL_ABSTRACT_ORIGIN (fn)
4589 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4590 fn = DECL_ABSTRACT_ORIGIN (fn);
4591
4592 /* Don't try to inline functions that are not well-suited to inlining. */
4593 if (cg_edge->inline_failed)
4594 {
4595 reason = cg_edge->inline_failed;
4596 /* If this call was originally indirect, we do not want to emit any
4597 inlining related warnings or sorry messages because there are no
4598 guarantees regarding those. */
4599 if (cg_edge->indirect_inlining_edge)
4600 goto egress;
4601
4602 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4603 /* For extern inline functions that get redefined we always
4604 silently ignored always_inline flag. Better behavior would
4605 be to be able to keep both bodies and use extern inline body
4606 for inlining, but we can't do that because frontends overwrite
4607 the body. */
4608 && !cg_edge->callee->local.redefined_extern_inline
4609 /* During early inline pass, report only when optimization is
4610 not turned on. */
4611 && (symtab->global_info_ready
4612 || !optimize
4613 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4614 /* PR 20090218-1_0.c. Body can be provided by another module. */
4615 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4616 {
4617 error ("inlining failed in call to always_inline %q+F: %s", fn,
4618 cgraph_inline_failed_string (reason));
4619 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4620 inform (gimple_location (stmt), "called from here");
4621 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4622 inform (DECL_SOURCE_LOCATION (cfun->decl),
4623 "called from this function");
4624 }
4625 else if (warn_inline
4626 && DECL_DECLARED_INLINE_P (fn)
4627 && !DECL_NO_INLINE_WARNING_P (fn)
4628 && !DECL_IN_SYSTEM_HEADER (fn)
4629 && reason != CIF_UNSPECIFIED
4630 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4631 /* Do not warn about not inlined recursive calls. */
4632 && !cg_edge->recursive_p ()
4633 /* Avoid warnings during early inline pass. */
4634 && symtab->global_info_ready)
4635 {
4636 auto_diagnostic_group d;
4637 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4638 fn, _(cgraph_inline_failed_string (reason))))
4639 {
4640 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4641 inform (gimple_location (stmt), "called from here");
4642 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4643 inform (DECL_SOURCE_LOCATION (cfun->decl),
4644 "called from this function");
4645 }
4646 }
4647 goto egress;
4648 }
4649 id->src_node = cg_edge->callee;
4650
4651 /* If callee is thunk, all we need is to adjust the THIS pointer
4652 and redirect to function being thunked. */
4653 if (id->src_node->thunk.thunk_p)
4654 {
4655 cgraph_edge *edge;
4656 tree virtual_offset = NULL;
4657 profile_count count = cg_edge->count;
4658 tree op;
4659 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4660
4661 cg_edge->remove ();
4662 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4663 gimple_uid (stmt),
4664 profile_count::one (),
4665 profile_count::one (),
4666 true);
4667 edge->count = count;
4668 if (id->src_node->thunk.virtual_offset_p)
4669 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4670 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4671 NULL);
4672 gsi_insert_before (&iter, gimple_build_assign (op,
4673 gimple_call_arg (stmt, 0)),
4674 GSI_NEW_STMT);
4675 gcc_assert (id->src_node->thunk.this_adjusting);
4676 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4677 virtual_offset, id->src_node->thunk.indirect_offset);
4678
4679 gimple_call_set_arg (stmt, 0, op);
4680 gimple_call_set_fndecl (stmt, edge->callee->decl);
4681 update_stmt (stmt);
4682 id->src_node->remove ();
4683 expand_call_inline (bb, stmt, id);
4684 maybe_remove_unused_call_args (cfun, stmt);
4685 return true;
4686 }
4687 fn = cg_edge->callee->decl;
4688 cg_edge->callee->get_untransformed_body ();
4689
4690 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4691 cg_edge->callee->verify ();
4692
4693 /* We will be inlining this callee. */
4694 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4695
4696 /* Update the callers EH personality. */
4697 if (DECL_FUNCTION_PERSONALITY (fn))
4698 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4699 = DECL_FUNCTION_PERSONALITY (fn);
4700
4701 /* Split the block before the GIMPLE_CALL. */
4702 stmt_gsi = gsi_for_stmt (stmt);
4703 gsi_prev (&stmt_gsi);
4704 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4705 bb = e->src;
4706 return_block = e->dest;
4707 remove_edge (e);
4708
4709 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4710 been the source of abnormal edges. In this case, schedule
4711 the removal of dead abnormal edges. */
4712 gsi = gsi_start_bb (return_block);
4713 gsi_next (&gsi);
4714 purge_dead_abnormal_edges = gsi_end_p (gsi);
4715
4716 stmt_gsi = gsi_start_bb (return_block);
4717
4718 /* Build a block containing code to initialize the arguments, the
4719 actual inline expansion of the body, and a label for the return
4720 statements within the function to jump to. The type of the
4721 statement expression is the return type of the function call.
4722 ??? If the call does not have an associated block then we will
4723 remap all callee blocks to NULL, effectively dropping most of
4724 its debug information. This should only happen for calls to
4725 artificial decls inserted by the compiler itself. We need to
4726 either link the inlined blocks into the caller block tree or
4727 not refer to them in any way to not break GC for locations. */
4728 if (tree block = gimple_block (stmt))
4729 {
4730 /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4731 to make inlined_function_outer_scope_p return true on this BLOCK. */
4732 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4733 if (loc == UNKNOWN_LOCATION)
4734 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4735 if (loc == UNKNOWN_LOCATION)
4736 loc = BUILTINS_LOCATION;
4737 id->block = make_node (BLOCK);
4738 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4739 BLOCK_SOURCE_LOCATION (id->block) = loc;
4740 prepend_lexical_block (block, id->block);
4741 }
4742
4743 /* Local declarations will be replaced by their equivalents in this map. */
4744 st = id->decl_map;
4745 id->decl_map = new hash_map<tree, tree>;
4746 dst = id->debug_map;
4747 id->debug_map = NULL;
4748 if (flag_stack_reuse != SR_NONE)
4749 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4750
4751 /* Record the function we are about to inline. */
4752 id->src_fn = fn;
4753 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4754 id->reset_location = DECL_IGNORED_P (fn);
4755 id->call_stmt = call_stmt;
4756
4757 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4758 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4759 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4760 simtvars_save = id->dst_simt_vars;
4761 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4762 && (simduid = bb->loop_father->simduid) != NULL_TREE
4763 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4764 && single_imm_use (simduid, &use, &simtenter_stmt)
4765 && is_gimple_call (simtenter_stmt)
4766 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4767 vec_alloc (id->dst_simt_vars, 0);
4768 else
4769 id->dst_simt_vars = NULL;
4770
4771 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4772 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4773
4774 /* If the src function contains an IFN_VA_ARG, then so will the dst
4775 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4776 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4777 src_properties = id->src_cfun->curr_properties & prop_mask;
4778 if (src_properties != prop_mask)
4779 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4780
4781 gcc_assert (!id->src_cfun->after_inlining);
4782
4783 id->entry_bb = bb;
4784 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4785 {
4786 gimple_stmt_iterator si = gsi_last_bb (bb);
4787 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4788 NOT_TAKEN),
4789 GSI_NEW_STMT);
4790 }
4791 initialize_inlined_parameters (id, stmt, fn, bb);
4792 if (debug_nonbind_markers_p && debug_inline_points && id->block
4793 && inlined_function_outer_scope_p (id->block))
4794 {
4795 gimple_stmt_iterator si = gsi_last_bb (bb);
4796 gsi_insert_after (&si, gimple_build_debug_inline_entry
4797 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4798 GSI_NEW_STMT);
4799 }
4800
4801 if (DECL_INITIAL (fn))
4802 {
4803 if (gimple_block (stmt))
4804 {
4805 tree *var;
4806
4807 prepend_lexical_block (id->block,
4808 remap_blocks (DECL_INITIAL (fn), id));
4809 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4810 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4811 == NULL_TREE));
4812 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4813 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4814 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4815 under it. The parameters can be then evaluated in the debugger,
4816 but don't show in backtraces. */
4817 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4818 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4819 {
4820 tree v = *var;
4821 *var = TREE_CHAIN (v);
4822 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4823 BLOCK_VARS (id->block) = v;
4824 }
4825 else
4826 var = &TREE_CHAIN (*var);
4827 }
4828 else
4829 remap_blocks_to_null (DECL_INITIAL (fn), id);
4830 }
4831
4832 /* Return statements in the function body will be replaced by jumps
4833 to the RET_LABEL. */
4834 gcc_assert (DECL_INITIAL (fn));
4835 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4836
4837 /* Find the LHS to which the result of this call is assigned. */
4838 return_slot = NULL;
4839 if (gimple_call_lhs (stmt))
4840 {
4841 modify_dest = gimple_call_lhs (stmt);
4842
4843 /* The function which we are inlining might not return a value,
4844 in which case we should issue a warning that the function
4845 does not return a value. In that case the optimizers will
4846 see that the variable to which the value is assigned was not
4847 initialized. We do not want to issue a warning about that
4848 uninitialized variable. */
4849 if (DECL_P (modify_dest))
4850 TREE_NO_WARNING (modify_dest) = 1;
4851
4852 if (gimple_call_return_slot_opt_p (call_stmt))
4853 {
4854 return_slot = modify_dest;
4855 modify_dest = NULL;
4856 }
4857 }
4858 else
4859 modify_dest = NULL;
4860
4861 /* If we are inlining a call to the C++ operator new, we don't want
4862 to use type based alias analysis on the return value. Otherwise
4863 we may get confused if the compiler sees that the inlined new
4864 function returns a pointer which was just deleted. See bug
4865 33407. */
4866 if (DECL_IS_OPERATOR_NEW (fn))
4867 {
4868 return_slot = NULL;
4869 modify_dest = NULL;
4870 }
4871
4872 /* Declare the return variable for the function. */
4873 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4874
4875 /* Add local vars in this inlined callee to caller. */
4876 add_local_variables (id->src_cfun, cfun, id);
4877
4878 if (dump_enabled_p ())
4879 {
4880 char buf[128];
4881 snprintf (buf, sizeof(buf), "%4.2f",
4882 cg_edge->sreal_frequency ().to_double ());
4883 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4884 call_stmt,
4885 "Inlining %C to %C with frequency %s\n",
4886 id->src_node, id->dst_node, buf);
4887 if (dump_file && (dump_flags & TDF_DETAILS))
4888 {
4889 id->src_node->dump (dump_file);
4890 id->dst_node->dump (dump_file);
4891 }
4892 }
4893
4894 /* This is it. Duplicate the callee body. Assume callee is
4895 pre-gimplified. Note that we must not alter the caller
4896 function in any way before this point, as this CALL_EXPR may be
4897 a self-referential call; if we're calling ourselves, we need to
4898 duplicate our body before altering anything. */
4899 copy_body (id, bb, return_block, NULL);
4900
4901 reset_debug_bindings (id, stmt_gsi);
4902
4903 if (flag_stack_reuse != SR_NONE)
4904 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4905 if (!TREE_THIS_VOLATILE (p))
4906 {
4907 tree *varp = id->decl_map->get (p);
4908 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4909 {
4910 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4911 gimple *clobber_stmt;
4912 TREE_THIS_VOLATILE (clobber) = 1;
4913 clobber_stmt = gimple_build_assign (*varp, clobber);
4914 gimple_set_location (clobber_stmt, gimple_location (stmt));
4915 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4916 }
4917 }
4918
4919 /* Reset the escaped solution. */
4920 if (cfun->gimple_df)
4921 pt_solution_reset (&cfun->gimple_df->escaped);
4922
4923 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4924 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4925 {
4926 size_t nargs = gimple_call_num_args (simtenter_stmt);
4927 vec<tree> *vars = id->dst_simt_vars;
4928 auto_vec<tree> newargs (nargs + vars->length ());
4929 for (size_t i = 0; i < nargs; i++)
4930 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4931 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4932 {
4933 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4934 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4935 }
4936 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4937 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4938 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4939 gsi_replace (&gsi, g, false);
4940 }
4941 vec_free (id->dst_simt_vars);
4942 id->dst_simt_vars = simtvars_save;
4943
4944 /* Clean up. */
4945 if (id->debug_map)
4946 {
4947 delete id->debug_map;
4948 id->debug_map = dst;
4949 }
4950 delete id->decl_map;
4951 id->decl_map = st;
4952
4953 /* Unlink the calls virtual operands before replacing it. */
4954 unlink_stmt_vdef (stmt);
4955 if (gimple_vdef (stmt)
4956 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4957 release_ssa_name (gimple_vdef (stmt));
4958
4959 /* If the inlined function returns a result that we care about,
4960 substitute the GIMPLE_CALL with an assignment of the return
4961 variable to the LHS of the call. That is, if STMT was
4962 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4963 if (use_retvar && gimple_call_lhs (stmt))
4964 {
4965 gimple *old_stmt = stmt;
4966 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4967 gimple_set_location (stmt, gimple_location (old_stmt));
4968 gsi_replace (&stmt_gsi, stmt, false);
4969 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4970 /* Append a clobber for id->retvar if easily possible. */
4971 if (flag_stack_reuse != SR_NONE
4972 && id->retvar
4973 && VAR_P (id->retvar)
4974 && id->retvar != return_slot
4975 && id->retvar != modify_dest
4976 && !TREE_THIS_VOLATILE (id->retvar)
4977 && !is_gimple_reg (id->retvar)
4978 && !stmt_ends_bb_p (stmt))
4979 {
4980 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4981 gimple *clobber_stmt;
4982 TREE_THIS_VOLATILE (clobber) = 1;
4983 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4984 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4985 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4986 }
4987 }
4988 else
4989 {
4990 /* Handle the case of inlining a function with no return
4991 statement, which causes the return value to become undefined. */
4992 if (gimple_call_lhs (stmt)
4993 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4994 {
4995 tree name = gimple_call_lhs (stmt);
4996 tree var = SSA_NAME_VAR (name);
4997 tree def = var ? ssa_default_def (cfun, var) : NULL;
4998
4999 if (def)
5000 {
5001 /* If the variable is used undefined, make this name
5002 undefined via a move. */
5003 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5004 gsi_replace (&stmt_gsi, stmt, true);
5005 }
5006 else
5007 {
5008 if (!var)
5009 {
5010 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5011 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5012 }
5013 /* Otherwise make this variable undefined. */
5014 gsi_remove (&stmt_gsi, true);
5015 set_ssa_default_def (cfun, var, name);
5016 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5017 }
5018 }
5019 /* Replace with a clobber for id->retvar. */
5020 else if (flag_stack_reuse != SR_NONE
5021 && id->retvar
5022 && VAR_P (id->retvar)
5023 && id->retvar != return_slot
5024 && id->retvar != modify_dest
5025 && !TREE_THIS_VOLATILE (id->retvar)
5026 && !is_gimple_reg (id->retvar))
5027 {
5028 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5029 gimple *clobber_stmt;
5030 TREE_THIS_VOLATILE (clobber) = 1;
5031 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5032 gimple_set_location (clobber_stmt, gimple_location (stmt));
5033 gsi_replace (&stmt_gsi, clobber_stmt, false);
5034 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5035 }
5036 else
5037 gsi_remove (&stmt_gsi, true);
5038 }
5039
5040 if (purge_dead_abnormal_edges)
5041 {
5042 gimple_purge_dead_eh_edges (return_block);
5043 gimple_purge_dead_abnormal_call_edges (return_block);
5044 }
5045
5046 /* If the value of the new expression is ignored, that's OK. We
5047 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5048 the equivalent inlined version either. */
5049 if (is_gimple_assign (stmt))
5050 {
5051 gcc_assert (gimple_assign_single_p (stmt)
5052 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5053 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5054 }
5055
5056 id->add_clobbers_to_eh_landing_pads = 0;
5057
5058 /* Output the inlining info for this abstract function, since it has been
5059 inlined. If we don't do this now, we can lose the information about the
5060 variables in the function when the blocks get blown away as soon as we
5061 remove the cgraph node. */
5062 if (gimple_block (stmt))
5063 (*debug_hooks->outlining_inline_function) (fn);
5064
5065 /* Update callgraph if needed. */
5066 cg_edge->callee->remove ();
5067
5068 id->block = NULL_TREE;
5069 id->retvar = NULL_TREE;
5070 successfully_inlined = true;
5071
5072 egress:
5073 input_location = saved_location;
5074 return successfully_inlined;
5075 }
5076
5077 /* Expand call statements reachable from STMT_P.
5078 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5079 in a MODIFY_EXPR. */
5080
5081 static bool
5082 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5083 {
5084 gimple_stmt_iterator gsi;
5085 bool inlined = false;
5086
5087 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5088 {
5089 gimple *stmt = gsi_stmt (gsi);
5090 gsi_prev (&gsi);
5091
5092 if (is_gimple_call (stmt)
5093 && !gimple_call_internal_p (stmt))
5094 inlined |= expand_call_inline (bb, stmt, id);
5095 }
5096
5097 return inlined;
5098 }
5099
5100
5101 /* Walk all basic blocks created after FIRST and try to fold every statement
5102 in the STATEMENTS pointer set. */
5103
5104 static void
5105 fold_marked_statements (int first, hash_set<gimple *> *statements)
5106 {
5107 for (; first < last_basic_block_for_fn (cfun); first++)
5108 if (BASIC_BLOCK_FOR_FN (cfun, first))
5109 {
5110 gimple_stmt_iterator gsi;
5111
5112 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5113 !gsi_end_p (gsi);
5114 gsi_next (&gsi))
5115 if (statements->contains (gsi_stmt (gsi)))
5116 {
5117 gimple *old_stmt = gsi_stmt (gsi);
5118 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5119
5120 if (old_decl && fndecl_built_in_p (old_decl))
5121 {
5122 /* Folding builtins can create multiple instructions,
5123 we need to look at all of them. */
5124 gimple_stmt_iterator i2 = gsi;
5125 gsi_prev (&i2);
5126 if (fold_stmt (&gsi))
5127 {
5128 gimple *new_stmt;
5129 /* If a builtin at the end of a bb folded into nothing,
5130 the following loop won't work. */
5131 if (gsi_end_p (gsi))
5132 {
5133 cgraph_update_edges_for_call_stmt (old_stmt,
5134 old_decl, NULL);
5135 break;
5136 }
5137 if (gsi_end_p (i2))
5138 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5139 else
5140 gsi_next (&i2);
5141 while (1)
5142 {
5143 new_stmt = gsi_stmt (i2);
5144 update_stmt (new_stmt);
5145 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5146 new_stmt);
5147
5148 if (new_stmt == gsi_stmt (gsi))
5149 {
5150 /* It is okay to check only for the very last
5151 of these statements. If it is a throwing
5152 statement nothing will change. If it isn't
5153 this can remove EH edges. If that weren't
5154 correct then because some intermediate stmts
5155 throw, but not the last one. That would mean
5156 we'd have to split the block, which we can't
5157 here and we'd loose anyway. And as builtins
5158 probably never throw, this all
5159 is mood anyway. */
5160 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5161 new_stmt))
5162 gimple_purge_dead_eh_edges (
5163 BASIC_BLOCK_FOR_FN (cfun, first));
5164 break;
5165 }
5166 gsi_next (&i2);
5167 }
5168 }
5169 }
5170 else if (fold_stmt (&gsi))
5171 {
5172 /* Re-read the statement from GSI as fold_stmt() may
5173 have changed it. */
5174 gimple *new_stmt = gsi_stmt (gsi);
5175 update_stmt (new_stmt);
5176
5177 if (is_gimple_call (old_stmt)
5178 || is_gimple_call (new_stmt))
5179 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5180 new_stmt);
5181
5182 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5183 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5184 first));
5185 }
5186 }
5187 }
5188 }
5189
5190 /* Expand calls to inline functions in the body of FN. */
5191
5192 unsigned int
5193 optimize_inline_calls (tree fn)
5194 {
5195 copy_body_data id;
5196 basic_block bb;
5197 int last = n_basic_blocks_for_fn (cfun);
5198 bool inlined_p = false;
5199
5200 /* Clear out ID. */
5201 memset (&id, 0, sizeof (id));
5202
5203 id.src_node = id.dst_node = cgraph_node::get (fn);
5204 gcc_assert (id.dst_node->definition);
5205 id.dst_fn = fn;
5206 /* Or any functions that aren't finished yet. */
5207 if (current_function_decl)
5208 id.dst_fn = current_function_decl;
5209
5210 id.copy_decl = copy_decl_maybe_to_var;
5211 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5212 id.transform_new_cfg = false;
5213 id.transform_return_to_modify = true;
5214 id.transform_parameter = true;
5215 id.transform_lang_insert_block = NULL;
5216 id.statements_to_fold = new hash_set<gimple *>;
5217
5218 push_gimplify_context ();
5219
5220 /* We make no attempts to keep dominance info up-to-date. */
5221 free_dominance_info (CDI_DOMINATORS);
5222 free_dominance_info (CDI_POST_DOMINATORS);
5223
5224 /* Register specific gimple functions. */
5225 gimple_register_cfg_hooks ();
5226
5227 /* Reach the trees by walking over the CFG, and note the
5228 enclosing basic-blocks in the call edges. */
5229 /* We walk the blocks going forward, because inlined function bodies
5230 will split id->current_basic_block, and the new blocks will
5231 follow it; we'll trudge through them, processing their CALL_EXPRs
5232 along the way. */
5233 FOR_EACH_BB_FN (bb, cfun)
5234 inlined_p |= gimple_expand_calls_inline (bb, &id);
5235
5236 pop_gimplify_context (NULL);
5237
5238 if (flag_checking)
5239 {
5240 struct cgraph_edge *e;
5241
5242 id.dst_node->verify ();
5243
5244 /* Double check that we inlined everything we are supposed to inline. */
5245 for (e = id.dst_node->callees; e; e = e->next_callee)
5246 gcc_assert (e->inline_failed);
5247 }
5248
5249 /* Fold queued statements. */
5250 update_max_bb_count ();
5251 fold_marked_statements (last, id.statements_to_fold);
5252 delete id.statements_to_fold;
5253
5254 gcc_assert (!id.debug_stmts.exists ());
5255
5256 /* If we didn't inline into the function there is nothing to do. */
5257 if (!inlined_p)
5258 return 0;
5259
5260 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5261 number_blocks (fn);
5262
5263 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5264
5265 if (flag_checking)
5266 id.dst_node->verify ();
5267
5268 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5269 not possible yet - the IPA passes might make various functions to not
5270 throw and they don't care to proactively update local EH info. This is
5271 done later in fixup_cfg pass that also execute the verification. */
5272 return (TODO_update_ssa
5273 | TODO_cleanup_cfg
5274 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5275 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5276 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5277 ? TODO_rebuild_frequencies : 0));
5278 }
5279
5280 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5281
5282 tree
5283 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5284 {
5285 enum tree_code code = TREE_CODE (*tp);
5286 enum tree_code_class cl = TREE_CODE_CLASS (code);
5287
5288 /* We make copies of most nodes. */
5289 if (IS_EXPR_CODE_CLASS (cl)
5290 || code == TREE_LIST
5291 || code == TREE_VEC
5292 || code == TYPE_DECL
5293 || code == OMP_CLAUSE)
5294 {
5295 /* Because the chain gets clobbered when we make a copy, we save it
5296 here. */
5297 tree chain = NULL_TREE, new_tree;
5298
5299 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5300 chain = TREE_CHAIN (*tp);
5301
5302 /* Copy the node. */
5303 new_tree = copy_node (*tp);
5304
5305 *tp = new_tree;
5306
5307 /* Now, restore the chain, if appropriate. That will cause
5308 walk_tree to walk into the chain as well. */
5309 if (code == PARM_DECL
5310 || code == TREE_LIST
5311 || code == OMP_CLAUSE)
5312 TREE_CHAIN (*tp) = chain;
5313
5314 /* For now, we don't update BLOCKs when we make copies. So, we
5315 have to nullify all BIND_EXPRs. */
5316 if (TREE_CODE (*tp) == BIND_EXPR)
5317 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5318 }
5319 else if (code == CONSTRUCTOR)
5320 {
5321 /* CONSTRUCTOR nodes need special handling because
5322 we need to duplicate the vector of elements. */
5323 tree new_tree;
5324
5325 new_tree = copy_node (*tp);
5326 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5327 *tp = new_tree;
5328 }
5329 else if (code == STATEMENT_LIST)
5330 /* We used to just abort on STATEMENT_LIST, but we can run into them
5331 with statement-expressions (c++/40975). */
5332 copy_statement_list (tp);
5333 else if (TREE_CODE_CLASS (code) == tcc_type)
5334 *walk_subtrees = 0;
5335 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5336 *walk_subtrees = 0;
5337 else if (TREE_CODE_CLASS (code) == tcc_constant)
5338 *walk_subtrees = 0;
5339 return NULL_TREE;
5340 }
5341
5342 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5343 information indicating to what new SAVE_EXPR this one should be mapped,
5344 use that one. Otherwise, create a new node and enter it in ST. FN is
5345 the function into which the copy will be placed. */
5346
5347 static void
5348 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5349 {
5350 tree *n;
5351 tree t;
5352
5353 /* See if we already encountered this SAVE_EXPR. */
5354 n = st->get (*tp);
5355
5356 /* If we didn't already remap this SAVE_EXPR, do so now. */
5357 if (!n)
5358 {
5359 t = copy_node (*tp);
5360
5361 /* Remember this SAVE_EXPR. */
5362 st->put (*tp, t);
5363 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5364 st->put (t, t);
5365 }
5366 else
5367 {
5368 /* We've already walked into this SAVE_EXPR; don't do it again. */
5369 *walk_subtrees = 0;
5370 t = *n;
5371 }
5372
5373 /* Replace this SAVE_EXPR with the copy. */
5374 *tp = t;
5375 }
5376
5377 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5378 label, copies the declaration and enters it in the splay_tree in DATA (which
5379 is really a 'copy_body_data *'. */
5380
5381 static tree
5382 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5383 bool *handled_ops_p ATTRIBUTE_UNUSED,
5384 struct walk_stmt_info *wi)
5385 {
5386 copy_body_data *id = (copy_body_data *) wi->info;
5387 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5388
5389 if (stmt)
5390 {
5391 tree decl = gimple_label_label (stmt);
5392
5393 /* Copy the decl and remember the copy. */
5394 insert_decl_map (id, decl, id->copy_decl (decl, id));
5395 }
5396
5397 return NULL_TREE;
5398 }
5399
5400 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5401 struct walk_stmt_info *wi);
5402
5403 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5404 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5405 remaps all local declarations to appropriate replacements in gimple
5406 operands. */
5407
5408 static tree
5409 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5410 {
5411 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5412 copy_body_data *id = (copy_body_data *) wi->info;
5413 hash_map<tree, tree> *st = id->decl_map;
5414 tree *n;
5415 tree expr = *tp;
5416
5417 /* For recursive invocations this is no longer the LHS itself. */
5418 bool is_lhs = wi->is_lhs;
5419 wi->is_lhs = false;
5420
5421 if (TREE_CODE (expr) == SSA_NAME)
5422 {
5423 *tp = remap_ssa_name (*tp, id);
5424 *walk_subtrees = 0;
5425 if (is_lhs)
5426 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5427 }
5428 /* Only a local declaration (variable or label). */
5429 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5430 || TREE_CODE (expr) == LABEL_DECL)
5431 {
5432 /* Lookup the declaration. */
5433 n = st->get (expr);
5434
5435 /* If it's there, remap it. */
5436 if (n)
5437 *tp = *n;
5438 *walk_subtrees = 0;
5439 }
5440 else if (TREE_CODE (expr) == STATEMENT_LIST
5441 || TREE_CODE (expr) == BIND_EXPR
5442 || TREE_CODE (expr) == SAVE_EXPR)
5443 gcc_unreachable ();
5444 else if (TREE_CODE (expr) == TARGET_EXPR)
5445 {
5446 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5447 It's OK for this to happen if it was part of a subtree that
5448 isn't immediately expanded, such as operand 2 of another
5449 TARGET_EXPR. */
5450 if (!TREE_OPERAND (expr, 1))
5451 {
5452 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5453 TREE_OPERAND (expr, 3) = NULL_TREE;
5454 }
5455 }
5456 else if (TREE_CODE (expr) == OMP_CLAUSE)
5457 {
5458 /* Before the omplower pass completes, some OMP clauses can contain
5459 sequences that are neither copied by gimple_seq_copy nor walked by
5460 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5461 in those situations, we have to copy and process them explicitely. */
5462
5463 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5464 {
5465 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5466 seq = duplicate_remap_omp_clause_seq (seq, wi);
5467 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5468 }
5469 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5470 {
5471 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5472 seq = duplicate_remap_omp_clause_seq (seq, wi);
5473 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5474 }
5475 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5476 {
5477 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5478 seq = duplicate_remap_omp_clause_seq (seq, wi);
5479 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5480 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5481 seq = duplicate_remap_omp_clause_seq (seq, wi);
5482 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5483 }
5484 }
5485
5486 /* Keep iterating. */
5487 return NULL_TREE;
5488 }
5489
5490
5491 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5492 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5493 remaps all local declarations to appropriate replacements in gimple
5494 statements. */
5495
5496 static tree
5497 replace_locals_stmt (gimple_stmt_iterator *gsip,
5498 bool *handled_ops_p ATTRIBUTE_UNUSED,
5499 struct walk_stmt_info *wi)
5500 {
5501 copy_body_data *id = (copy_body_data *) wi->info;
5502 gimple *gs = gsi_stmt (*gsip);
5503
5504 if (gbind *stmt = dyn_cast <gbind *> (gs))
5505 {
5506 tree block = gimple_bind_block (stmt);
5507
5508 if (block)
5509 {
5510 remap_block (&block, id);
5511 gimple_bind_set_block (stmt, block);
5512 }
5513
5514 /* This will remap a lot of the same decls again, but this should be
5515 harmless. */
5516 if (gimple_bind_vars (stmt))
5517 {
5518 tree old_var, decls = gimple_bind_vars (stmt);
5519
5520 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5521 if (!can_be_nonlocal (old_var, id)
5522 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5523 remap_decl (old_var, id);
5524
5525 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5526 id->prevent_decl_creation_for_types = true;
5527 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5528 id->prevent_decl_creation_for_types = false;
5529 }
5530 }
5531
5532 /* Keep iterating. */
5533 return NULL_TREE;
5534 }
5535
5536 /* Create a copy of SEQ and remap all decls in it. */
5537
5538 static gimple_seq
5539 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5540 {
5541 if (!seq)
5542 return NULL;
5543
5544 /* If there are any labels in OMP sequences, they can be only referred to in
5545 the sequence itself and therefore we can do both here. */
5546 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5547 gimple_seq copy = gimple_seq_copy (seq);
5548 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5549 return copy;
5550 }
5551
5552 /* Copies everything in SEQ and replaces variables and labels local to
5553 current_function_decl. */
5554
5555 gimple_seq
5556 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5557 {
5558 copy_body_data id;
5559 struct walk_stmt_info wi;
5560 gimple_seq copy;
5561
5562 /* There's nothing to do for NULL_TREE. */
5563 if (seq == NULL)
5564 return seq;
5565
5566 /* Set up ID. */
5567 memset (&id, 0, sizeof (id));
5568 id.src_fn = current_function_decl;
5569 id.dst_fn = current_function_decl;
5570 id.src_cfun = cfun;
5571 id.decl_map = new hash_map<tree, tree>;
5572 id.debug_map = NULL;
5573
5574 id.copy_decl = copy_decl_no_change;
5575 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5576 id.transform_new_cfg = false;
5577 id.transform_return_to_modify = false;
5578 id.transform_parameter = false;
5579 id.transform_lang_insert_block = NULL;
5580
5581 /* Walk the tree once to find local labels. */
5582 memset (&wi, 0, sizeof (wi));
5583 hash_set<tree> visited;
5584 wi.info = &id;
5585 wi.pset = &visited;
5586 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5587
5588 copy = gimple_seq_copy (seq);
5589
5590 /* Walk the copy, remapping decls. */
5591 memset (&wi, 0, sizeof (wi));
5592 wi.info = &id;
5593 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5594
5595 /* Clean up. */
5596 delete id.decl_map;
5597 if (id.debug_map)
5598 delete id.debug_map;
5599 if (id.dependence_map)
5600 {
5601 delete id.dependence_map;
5602 id.dependence_map = NULL;
5603 }
5604
5605 return copy;
5606 }
5607
5608
5609 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5610
5611 static tree
5612 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5613 {
5614 if (*tp == data)
5615 return (tree) data;
5616 else
5617 return NULL;
5618 }
5619
5620 DEBUG_FUNCTION bool
5621 debug_find_tree (tree top, tree search)
5622 {
5623 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5624 }
5625
5626
5627 /* Declare the variables created by the inliner. Add all the variables in
5628 VARS to BIND_EXPR. */
5629
5630 static void
5631 declare_inline_vars (tree block, tree vars)
5632 {
5633 tree t;
5634 for (t = vars; t; t = DECL_CHAIN (t))
5635 {
5636 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5637 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5638 add_local_decl (cfun, t);
5639 }
5640
5641 if (block)
5642 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5643 }
5644
5645 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5646 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5647 VAR_DECL translation. */
5648
5649 tree
5650 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5651 {
5652 /* Don't generate debug information for the copy if we wouldn't have
5653 generated it for the copy either. */
5654 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5655 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5656
5657 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5658 declaration inspired this copy. */
5659 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5660
5661 /* The new variable/label has no RTL, yet. */
5662 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5663 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5664 SET_DECL_RTL (copy, 0);
5665 /* For vector typed decls make sure to update DECL_MODE according
5666 to the new function context. */
5667 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5668 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5669
5670 /* These args would always appear unused, if not for this. */
5671 TREE_USED (copy) = 1;
5672
5673 /* Set the context for the new declaration. */
5674 if (!DECL_CONTEXT (decl))
5675 /* Globals stay global. */
5676 ;
5677 else if (DECL_CONTEXT (decl) != id->src_fn)
5678 /* Things that weren't in the scope of the function we're inlining
5679 from aren't in the scope we're inlining to, either. */
5680 ;
5681 else if (TREE_STATIC (decl))
5682 /* Function-scoped static variables should stay in the original
5683 function. */
5684 ;
5685 else
5686 {
5687 /* Ordinary automatic local variables are now in the scope of the
5688 new function. */
5689 DECL_CONTEXT (copy) = id->dst_fn;
5690 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5691 {
5692 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5693 DECL_ATTRIBUTES (copy)
5694 = tree_cons (get_identifier ("omp simt private"), NULL,
5695 DECL_ATTRIBUTES (copy));
5696 id->dst_simt_vars->safe_push (copy);
5697 }
5698 }
5699
5700 return copy;
5701 }
5702
5703 static tree
5704 copy_decl_to_var (tree decl, copy_body_data *id)
5705 {
5706 tree copy, type;
5707
5708 gcc_assert (TREE_CODE (decl) == PARM_DECL
5709 || TREE_CODE (decl) == RESULT_DECL);
5710
5711 type = TREE_TYPE (decl);
5712
5713 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5714 VAR_DECL, DECL_NAME (decl), type);
5715 if (DECL_PT_UID_SET_P (decl))
5716 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5717 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5718 TREE_READONLY (copy) = TREE_READONLY (decl);
5719 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5720 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5721
5722 return copy_decl_for_dup_finish (id, decl, copy);
5723 }
5724
5725 /* Like copy_decl_to_var, but create a return slot object instead of a
5726 pointer variable for return by invisible reference. */
5727
5728 static tree
5729 copy_result_decl_to_var (tree decl, copy_body_data *id)
5730 {
5731 tree copy, type;
5732
5733 gcc_assert (TREE_CODE (decl) == PARM_DECL
5734 || TREE_CODE (decl) == RESULT_DECL);
5735
5736 type = TREE_TYPE (decl);
5737 if (DECL_BY_REFERENCE (decl))
5738 type = TREE_TYPE (type);
5739
5740 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5741 VAR_DECL, DECL_NAME (decl), type);
5742 if (DECL_PT_UID_SET_P (decl))
5743 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5744 TREE_READONLY (copy) = TREE_READONLY (decl);
5745 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5746 if (!DECL_BY_REFERENCE (decl))
5747 {
5748 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5749 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5750 }
5751
5752 return copy_decl_for_dup_finish (id, decl, copy);
5753 }
5754
5755 tree
5756 copy_decl_no_change (tree decl, copy_body_data *id)
5757 {
5758 tree copy;
5759
5760 copy = copy_node (decl);
5761
5762 /* The COPY is not abstract; it will be generated in DST_FN. */
5763 DECL_ABSTRACT_P (copy) = false;
5764 lang_hooks.dup_lang_specific_decl (copy);
5765
5766 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5767 been taken; it's for internal bookkeeping in expand_goto_internal. */
5768 if (TREE_CODE (copy) == LABEL_DECL)
5769 {
5770 TREE_ADDRESSABLE (copy) = 0;
5771 LABEL_DECL_UID (copy) = -1;
5772 }
5773
5774 return copy_decl_for_dup_finish (id, decl, copy);
5775 }
5776
5777 static tree
5778 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5779 {
5780 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5781 return copy_decl_to_var (decl, id);
5782 else
5783 return copy_decl_no_change (decl, id);
5784 }
5785
5786 /* Return a copy of the function's argument tree. */
5787 static tree
5788 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5789 bitmap args_to_skip, tree *vars)
5790 {
5791 tree arg, *parg;
5792 tree new_parm = NULL;
5793 int i = 0;
5794
5795 parg = &new_parm;
5796
5797 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5798 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5799 {
5800 tree new_tree = remap_decl (arg, id);
5801 if (TREE_CODE (new_tree) != PARM_DECL)
5802 new_tree = id->copy_decl (arg, id);
5803 lang_hooks.dup_lang_specific_decl (new_tree);
5804 *parg = new_tree;
5805 parg = &DECL_CHAIN (new_tree);
5806 }
5807 else if (!id->decl_map->get (arg))
5808 {
5809 /* Make an equivalent VAR_DECL. If the argument was used
5810 as temporary variable later in function, the uses will be
5811 replaced by local variable. */
5812 tree var = copy_decl_to_var (arg, id);
5813 insert_decl_map (id, arg, var);
5814 /* Declare this new variable. */
5815 DECL_CHAIN (var) = *vars;
5816 *vars = var;
5817 }
5818 return new_parm;
5819 }
5820
5821 /* Return a copy of the function's static chain. */
5822 static tree
5823 copy_static_chain (tree static_chain, copy_body_data * id)
5824 {
5825 tree *chain_copy, *pvar;
5826
5827 chain_copy = &static_chain;
5828 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5829 {
5830 tree new_tree = remap_decl (*pvar, id);
5831 lang_hooks.dup_lang_specific_decl (new_tree);
5832 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5833 *pvar = new_tree;
5834 }
5835 return static_chain;
5836 }
5837
5838 /* Return true if the function is allowed to be versioned.
5839 This is a guard for the versioning functionality. */
5840
5841 bool
5842 tree_versionable_function_p (tree fndecl)
5843 {
5844 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5845 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5846 }
5847
5848 /* Update clone info after duplication. */
5849
5850 static void
5851 update_clone_info (copy_body_data * id)
5852 {
5853 struct cgraph_node *node;
5854 if (!id->dst_node->clones)
5855 return;
5856 for (node = id->dst_node->clones; node != id->dst_node;)
5857 {
5858 /* First update replace maps to match the new body. */
5859 if (node->clone.tree_map)
5860 {
5861 unsigned int i;
5862 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5863 {
5864 struct ipa_replace_map *replace_info;
5865 replace_info = (*node->clone.tree_map)[i];
5866 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5867 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5868 }
5869 }
5870 if (node->clones)
5871 node = node->clones;
5872 else if (node->next_sibling_clone)
5873 node = node->next_sibling_clone;
5874 else
5875 {
5876 while (node != id->dst_node && !node->next_sibling_clone)
5877 node = node->clone_of;
5878 if (node != id->dst_node)
5879 node = node->next_sibling_clone;
5880 }
5881 }
5882 }
5883
5884 /* Create a copy of a function's tree.
5885 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5886 of the original function and the new copied function
5887 respectively. In case we want to replace a DECL
5888 tree with another tree while duplicating the function's
5889 body, TREE_MAP represents the mapping between these
5890 trees. If UPDATE_CLONES is set, the call_stmt fields
5891 of edges of clones of the function will be updated.
5892
5893 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5894 from new version.
5895 If SKIP_RETURN is true, the new version will return void.
5896 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5897 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5898 */
5899 void
5900 tree_function_versioning (tree old_decl, tree new_decl,
5901 vec<ipa_replace_map *, va_gc> *tree_map,
5902 bool update_clones, bitmap args_to_skip,
5903 bool skip_return, bitmap blocks_to_copy,
5904 basic_block new_entry)
5905 {
5906 struct cgraph_node *old_version_node;
5907 struct cgraph_node *new_version_node;
5908 copy_body_data id;
5909 tree p;
5910 unsigned i;
5911 struct ipa_replace_map *replace_info;
5912 basic_block old_entry_block, bb;
5913 auto_vec<gimple *, 10> init_stmts;
5914 tree vars = NULL_TREE;
5915 bitmap debug_args_to_skip = args_to_skip;
5916
5917 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5918 && TREE_CODE (new_decl) == FUNCTION_DECL);
5919 DECL_POSSIBLY_INLINED (old_decl) = 1;
5920
5921 old_version_node = cgraph_node::get (old_decl);
5922 gcc_checking_assert (old_version_node);
5923 new_version_node = cgraph_node::get (new_decl);
5924 gcc_checking_assert (new_version_node);
5925
5926 /* Copy over debug args. */
5927 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5928 {
5929 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5930 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5931 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5932 old_debug_args = decl_debug_args_lookup (old_decl);
5933 if (old_debug_args)
5934 {
5935 new_debug_args = decl_debug_args_insert (new_decl);
5936 *new_debug_args = vec_safe_copy (*old_debug_args);
5937 }
5938 }
5939
5940 /* Output the inlining info for this abstract function, since it has been
5941 inlined. If we don't do this now, we can lose the information about the
5942 variables in the function when the blocks get blown away as soon as we
5943 remove the cgraph node. */
5944 (*debug_hooks->outlining_inline_function) (old_decl);
5945
5946 DECL_ARTIFICIAL (new_decl) = 1;
5947 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5948 if (DECL_ORIGIN (old_decl) == old_decl)
5949 old_version_node->used_as_abstract_origin = true;
5950 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5951
5952 /* Prepare the data structures for the tree copy. */
5953 memset (&id, 0, sizeof (id));
5954
5955 /* Generate a new name for the new version. */
5956 id.statements_to_fold = new hash_set<gimple *>;
5957
5958 id.decl_map = new hash_map<tree, tree>;
5959 id.debug_map = NULL;
5960 id.src_fn = old_decl;
5961 id.dst_fn = new_decl;
5962 id.src_node = old_version_node;
5963 id.dst_node = new_version_node;
5964 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5965 id.blocks_to_copy = blocks_to_copy;
5966
5967 id.copy_decl = copy_decl_no_change;
5968 id.transform_call_graph_edges
5969 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5970 id.transform_new_cfg = true;
5971 id.transform_return_to_modify = false;
5972 id.transform_parameter = false;
5973 id.transform_lang_insert_block = NULL;
5974
5975 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5976 (DECL_STRUCT_FUNCTION (old_decl));
5977 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5978 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5979 initialize_cfun (new_decl, old_decl,
5980 new_entry ? new_entry->count : old_entry_block->count);
5981 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5982 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5983 = id.src_cfun->gimple_df->ipa_pta;
5984
5985 /* Copy the function's static chain. */
5986 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5987 if (p)
5988 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5989 = copy_static_chain (p, &id);
5990
5991 /* If there's a tree_map, prepare for substitution. */
5992 if (tree_map)
5993 for (i = 0; i < tree_map->length (); i++)
5994 {
5995 gimple *init;
5996 replace_info = (*tree_map)[i];
5997 if (replace_info->replace_p)
5998 {
5999 int parm_num = -1;
6000 if (!replace_info->old_tree)
6001 {
6002 int p = replace_info->parm_num;
6003 tree parm;
6004 tree req_type, new_type;
6005
6006 for (parm = DECL_ARGUMENTS (old_decl); p;
6007 parm = DECL_CHAIN (parm))
6008 p--;
6009 replace_info->old_tree = parm;
6010 parm_num = replace_info->parm_num;
6011 req_type = TREE_TYPE (parm);
6012 new_type = TREE_TYPE (replace_info->new_tree);
6013 if (!useless_type_conversion_p (req_type, new_type))
6014 {
6015 if (fold_convertible_p (req_type, replace_info->new_tree))
6016 replace_info->new_tree
6017 = fold_build1 (NOP_EXPR, req_type,
6018 replace_info->new_tree);
6019 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6020 replace_info->new_tree
6021 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6022 replace_info->new_tree);
6023 else
6024 {
6025 if (dump_file)
6026 {
6027 fprintf (dump_file, " const ");
6028 print_generic_expr (dump_file,
6029 replace_info->new_tree);
6030 fprintf (dump_file,
6031 " can't be converted to param ");
6032 print_generic_expr (dump_file, parm);
6033 fprintf (dump_file, "\n");
6034 }
6035 replace_info->old_tree = NULL;
6036 }
6037 }
6038 }
6039 else
6040 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6041 if (replace_info->old_tree)
6042 {
6043 init = setup_one_parameter (&id, replace_info->old_tree,
6044 replace_info->new_tree, id.src_fn,
6045 NULL,
6046 &vars);
6047 if (init)
6048 init_stmts.safe_push (init);
6049 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6050 {
6051 if (parm_num == -1)
6052 {
6053 tree parm;
6054 int p;
6055 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6056 parm = DECL_CHAIN (parm), p++)
6057 if (parm == replace_info->old_tree)
6058 {
6059 parm_num = p;
6060 break;
6061 }
6062 }
6063 if (parm_num != -1)
6064 {
6065 if (debug_args_to_skip == args_to_skip)
6066 {
6067 debug_args_to_skip = BITMAP_ALLOC (NULL);
6068 bitmap_copy (debug_args_to_skip, args_to_skip);
6069 }
6070 bitmap_clear_bit (debug_args_to_skip, parm_num);
6071 }
6072 }
6073 }
6074 }
6075 }
6076 /* Copy the function's arguments. */
6077 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6078 DECL_ARGUMENTS (new_decl)
6079 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6080 args_to_skip, &vars);
6081
6082 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6083 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6084
6085 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6086
6087 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6088 /* Add local vars. */
6089 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6090
6091 if (DECL_RESULT (old_decl) == NULL_TREE)
6092 ;
6093 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6094 {
6095 DECL_RESULT (new_decl)
6096 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6097 RESULT_DECL, NULL_TREE, void_type_node);
6098 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6099 cfun->returns_struct = 0;
6100 cfun->returns_pcc_struct = 0;
6101 }
6102 else
6103 {
6104 tree old_name;
6105 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6106 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6107 if (gimple_in_ssa_p (id.src_cfun)
6108 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6109 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6110 {
6111 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6112 insert_decl_map (&id, old_name, new_name);
6113 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6114 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6115 }
6116 }
6117
6118 /* Set up the destination functions loop tree. */
6119 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6120 {
6121 cfun->curr_properties &= ~PROP_loops;
6122 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6123 cfun->curr_properties |= PROP_loops;
6124 }
6125
6126 /* Copy the Function's body. */
6127 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6128 new_entry);
6129
6130 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6131 number_blocks (new_decl);
6132
6133 /* We want to create the BB unconditionally, so that the addition of
6134 debug stmts doesn't affect BB count, which may in the end cause
6135 codegen differences. */
6136 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6137 while (init_stmts.length ())
6138 insert_init_stmt (&id, bb, init_stmts.pop ());
6139 update_clone_info (&id);
6140
6141 /* Remap the nonlocal_goto_save_area, if any. */
6142 if (cfun->nonlocal_goto_save_area)
6143 {
6144 struct walk_stmt_info wi;
6145
6146 memset (&wi, 0, sizeof (wi));
6147 wi.info = &id;
6148 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6149 }
6150
6151 /* Clean up. */
6152 delete id.decl_map;
6153 if (id.debug_map)
6154 delete id.debug_map;
6155 free_dominance_info (CDI_DOMINATORS);
6156 free_dominance_info (CDI_POST_DOMINATORS);
6157
6158 update_max_bb_count ();
6159 fold_marked_statements (0, id.statements_to_fold);
6160 delete id.statements_to_fold;
6161 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6162 if (id.dst_node->definition)
6163 cgraph_edge::rebuild_references ();
6164 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6165 {
6166 calculate_dominance_info (CDI_DOMINATORS);
6167 fix_loop_structure (NULL);
6168 }
6169 update_ssa (TODO_update_ssa);
6170
6171 /* After partial cloning we need to rescale frequencies, so they are
6172 within proper range in the cloned function. */
6173 if (new_entry)
6174 {
6175 struct cgraph_edge *e;
6176 rebuild_frequencies ();
6177
6178 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6179 for (e = new_version_node->callees; e; e = e->next_callee)
6180 {
6181 basic_block bb = gimple_bb (e->call_stmt);
6182 e->count = bb->count;
6183 }
6184 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6185 {
6186 basic_block bb = gimple_bb (e->call_stmt);
6187 e->count = bb->count;
6188 }
6189 }
6190
6191 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6192 {
6193 tree parm;
6194 vec<tree, va_gc> **debug_args = NULL;
6195 unsigned int len = 0;
6196 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6197 parm; parm = DECL_CHAIN (parm), i++)
6198 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6199 {
6200 tree ddecl;
6201
6202 if (debug_args == NULL)
6203 {
6204 debug_args = decl_debug_args_insert (new_decl);
6205 len = vec_safe_length (*debug_args);
6206 }
6207 ddecl = make_node (DEBUG_EXPR_DECL);
6208 DECL_ARTIFICIAL (ddecl) = 1;
6209 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6210 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6211 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6212 vec_safe_push (*debug_args, ddecl);
6213 }
6214 if (debug_args != NULL)
6215 {
6216 /* On the callee side, add
6217 DEBUG D#Y s=> parm
6218 DEBUG var => D#Y
6219 stmts to the first bb where var is a VAR_DECL created for the
6220 optimized away parameter in DECL_INITIAL block. This hints
6221 in the debug info that var (whole DECL_ORIGIN is the parm
6222 PARM_DECL) is optimized away, but could be looked up at the
6223 call site as value of D#X there. */
6224 tree var = vars, vexpr;
6225 gimple_stmt_iterator cgsi
6226 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6227 gimple *def_temp;
6228 var = vars;
6229 i = vec_safe_length (*debug_args);
6230 do
6231 {
6232 i -= 2;
6233 while (var != NULL_TREE
6234 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6235 var = TREE_CHAIN (var);
6236 if (var == NULL_TREE)
6237 break;
6238 vexpr = make_node (DEBUG_EXPR_DECL);
6239 parm = (**debug_args)[i];
6240 DECL_ARTIFICIAL (vexpr) = 1;
6241 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6242 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6243 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6244 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6245 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6246 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6247 }
6248 while (i > len);
6249 }
6250 }
6251
6252 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6253 BITMAP_FREE (debug_args_to_skip);
6254 free_dominance_info (CDI_DOMINATORS);
6255 free_dominance_info (CDI_POST_DOMINATORS);
6256
6257 gcc_assert (!id.debug_stmts.exists ());
6258 pop_cfun ();
6259 return;
6260 }
6261
6262 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6263 the callee and return the inlined body on success. */
6264
6265 tree
6266 maybe_inline_call_in_expr (tree exp)
6267 {
6268 tree fn = get_callee_fndecl (exp);
6269
6270 /* We can only try to inline "const" functions. */
6271 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6272 {
6273 call_expr_arg_iterator iter;
6274 copy_body_data id;
6275 tree param, arg, t;
6276 hash_map<tree, tree> decl_map;
6277
6278 /* Remap the parameters. */
6279 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6280 param;
6281 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6282 decl_map.put (param, arg);
6283
6284 memset (&id, 0, sizeof (id));
6285 id.src_fn = fn;
6286 id.dst_fn = current_function_decl;
6287 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6288 id.decl_map = &decl_map;
6289
6290 id.copy_decl = copy_decl_no_change;
6291 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6292 id.transform_new_cfg = false;
6293 id.transform_return_to_modify = true;
6294 id.transform_parameter = true;
6295 id.transform_lang_insert_block = NULL;
6296
6297 /* Make sure not to unshare trees behind the front-end's back
6298 since front-end specific mechanisms may rely on sharing. */
6299 id.regimplify = false;
6300 id.do_not_unshare = true;
6301
6302 /* We're not inside any EH region. */
6303 id.eh_lp_nr = 0;
6304
6305 t = copy_tree_body (&id);
6306
6307 /* We can only return something suitable for use in a GENERIC
6308 expression tree. */
6309 if (TREE_CODE (t) == MODIFY_EXPR)
6310 return TREE_OPERAND (t, 1);
6311 }
6312
6313 return NULL_TREE;
6314 }
6315
6316 /* Duplicate a type, fields and all. */
6317
6318 tree
6319 build_duplicate_type (tree type)
6320 {
6321 struct copy_body_data id;
6322
6323 memset (&id, 0, sizeof (id));
6324 id.src_fn = current_function_decl;
6325 id.dst_fn = current_function_decl;
6326 id.src_cfun = cfun;
6327 id.decl_map = new hash_map<tree, tree>;
6328 id.debug_map = NULL;
6329 id.copy_decl = copy_decl_no_change;
6330
6331 type = remap_type_1 (type, &id);
6332
6333 delete id.decl_map;
6334 if (id.debug_map)
6335 delete id.debug_map;
6336
6337 TYPE_CANONICAL (type) = type;
6338
6339 return type;
6340 }
6341
6342 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6343 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6344 evaluation. */
6345
6346 tree
6347 copy_fn (tree fn, tree& parms, tree& result)
6348 {
6349 copy_body_data id;
6350 tree param;
6351 hash_map<tree, tree> decl_map;
6352
6353 tree *p = &parms;
6354 *p = NULL_TREE;
6355
6356 memset (&id, 0, sizeof (id));
6357 id.src_fn = fn;
6358 id.dst_fn = current_function_decl;
6359 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6360 id.decl_map = &decl_map;
6361
6362 id.copy_decl = copy_decl_no_change;
6363 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6364 id.transform_new_cfg = false;
6365 id.transform_return_to_modify = false;
6366 id.transform_parameter = true;
6367 id.transform_lang_insert_block = NULL;
6368
6369 /* Make sure not to unshare trees behind the front-end's back
6370 since front-end specific mechanisms may rely on sharing. */
6371 id.regimplify = false;
6372 id.do_not_unshare = true;
6373
6374 /* We're not inside any EH region. */
6375 id.eh_lp_nr = 0;
6376
6377 /* Remap the parameters and result and return them to the caller. */
6378 for (param = DECL_ARGUMENTS (fn);
6379 param;
6380 param = DECL_CHAIN (param))
6381 {
6382 *p = remap_decl (param, &id);
6383 p = &DECL_CHAIN (*p);
6384 }
6385
6386 if (DECL_RESULT (fn))
6387 result = remap_decl (DECL_RESULT (fn), &id);
6388 else
6389 result = NULL_TREE;
6390
6391 return copy_tree_body (&id);
6392 }