1 /* Tree inlining.
2 Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "params.h"
57 #include "value-prof.h"
58 #include "cfgloop.h"
59 #include "builtins.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "sreal.h"
63 #include "tree-cfgcleanup.h"
64 #include "tree-ssa-live.h"
65
66 /* I'm not really happy about this, but we need to handle both gimple and
67    non-gimple trees. */
68
69 /* Inlining, Cloning, Versioning, Parallelization
70
71 Inlining: a function body is duplicated, but the PARM_DECLs are
72 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
73 MODIFY_EXPRs that store to a dedicated returned-value variable.
74 The duplicated eh_region info of the copy will later be appended
75 to the info for the caller; the eh_region info in copied throwing
76    statements and RESX statements is adjusted accordingly.
77
78 Cloning: (only in C++) We have one body for a con/de/structor, and
79 multiple function decls, each with a unique parameter list.
80 Duplicate the body, using the given splay tree; some parameters
81 will become constants (like 0 or 1).
82
83    Versioning: a function body is duplicated and the result is a new
84    function, rather than being inserted into the blocks of an existing
85    function as with inlining. Some parameters will become constants.
86
87 Parallelization: a region of a function is duplicated resulting in
88 a new function. Variables may be replaced with complex expressions
89 to enable shared variable semantics.
90
91    All of these will simultaneously look up any callgraph edges. If
92 we're going to inline the duplicated function body, and the given
93 function has some cloned callgraph nodes (one for each place this
94 function will be inlined) those callgraph edges will be duplicated.
95 If we're cloning the body, those callgraph edges will be
96 updated to point into the new body. (Note that the original
97 callgraph node and edge list will not be altered.)
98
99 See the CALL_EXPR handling case in copy_tree_body_r (). */
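/* Illustrative example (added commentary, not from the original sources):
   given a callee

     int inc (int x) { return x + 1; }

   inlining it into "y = inc (a);" conceptually produces, in the caller,

     x.1 = a;               <- the PARM_DECL remapped to a VAR_DECL
     retval.2 = x.1 + 1;    <- the RETURN_EXPR turned into a MODIFY_EXPR
     y = retval.2;          <- the dedicated returned-value variable

   The names x.1 and retval.2 are invented here purely for illustration.  */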
100
101 /* To Do:
102
103 o In order to make inlining-on-trees work, we pessimized
104 function-local static constants. In particular, they are now
105 always output, even when not addressed. Fix this by treating
106 function-local static constants just like global static
107 constants; the back-end already knows not to output them if they
108 are not needed.
109
110 o Provide heuristics to clamp inlining of recursive template
111 calls? */
112
113
114 /* Weights that estimate_num_insns uses to estimate the size of the
115 produced code. */
116
117 eni_weights eni_size_weights;
118
119 /* Weights that estimate_num_insns uses to estimate the time necessary
120 to execute the produced code. */
121
122 eni_weights eni_time_weights;
123
124 /* Prototypes. */
125
126 static tree declare_return_variable (copy_body_data *, tree, tree,
127 basic_block);
128 static void remap_block (tree *, copy_body_data *);
129 static void copy_bind_expr (tree *, int *, copy_body_data *);
130 static void declare_inline_vars (tree, tree);
131 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
132 static void prepend_lexical_block (tree current_block, tree new_block);
133 static tree copy_decl_to_var (tree, copy_body_data *);
134 static tree copy_result_decl_to_var (tree, copy_body_data *);
135 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
136 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
137 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
138
139 /* Insert a tree->tree mapping for ID. Although the name suggests
140    that the trees should be variables, it is used for more than that. */
141
142 void
143 insert_decl_map (copy_body_data *id, tree key, tree value)
144 {
145 id->decl_map->put (key, value);
146
147 /* Always insert an identity map as well. If we see this same new
148 node again, we won't want to duplicate it a second time. */
149 if (key != value)
150 id->decl_map->put (value, value);
151 }
152
153 /* Insert a tree->tree mapping for ID. This is only used for
154 variables. */
155
156 static void
157 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
158 {
159 if (!gimple_in_ssa_p (id->src_cfun))
160 return;
161
162 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
163 return;
164
165 if (!target_for_debug_bind (key))
166 return;
167
168 gcc_assert (TREE_CODE (key) == PARM_DECL);
169 gcc_assert (VAR_P (value));
170
171 if (!id->debug_map)
172 id->debug_map = new hash_map<tree, tree>;
173
174 id->debug_map->put (key, value);
175 }
176
177 /* If nonzero, we're remapping the contents of inlined debug
178 statements. If negative, an error has occurred, such as a
179 reference to a variable that isn't available in the inlined
180 context. */
181 static int processing_debug_stmt = 0;
182
183 /* Construct new SSA name for old NAME. ID is the inline context. */
184
185 static tree
186 remap_ssa_name (tree name, copy_body_data *id)
187 {
188 tree new_tree, var;
189 tree *n;
190
191 gcc_assert (TREE_CODE (name) == SSA_NAME);
192
193 n = id->decl_map->get (name);
194 if (n)
195 return unshare_expr (*n);
196
197 if (processing_debug_stmt)
198 {
199 if (SSA_NAME_IS_DEFAULT_DEF (name)
200 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
201 && id->entry_bb == NULL
202 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
203 {
204 tree vexpr = make_node (DEBUG_EXPR_DECL);
205 gimple *def_temp;
206 gimple_stmt_iterator gsi;
207 tree val = SSA_NAME_VAR (name);
208
209 n = id->decl_map->get (val);
210 if (n != NULL)
211 val = *n;
212 if (TREE_CODE (val) != PARM_DECL
213 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
214 {
215 processing_debug_stmt = -1;
216 return name;
217 }
218 n = id->decl_map->get (val);
219 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
220 return *n;
221 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
222 DECL_ARTIFICIAL (vexpr) = 1;
223 TREE_TYPE (vexpr) = TREE_TYPE (name);
224 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
225 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
226 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
227 insert_decl_map (id, val, vexpr);
228 return vexpr;
229 }
230
231 processing_debug_stmt = -1;
232 return name;
233 }
234
235 /* Remap anonymous SSA names or SSA names of anonymous decls. */
236 var = SSA_NAME_VAR (name);
237 if (!var
238 || (!SSA_NAME_IS_DEFAULT_DEF (name)
239 && VAR_P (var)
240 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
241 && DECL_ARTIFICIAL (var)
242 && DECL_IGNORED_P (var)
243 && !DECL_NAME (var)))
244 {
245 struct ptr_info_def *pi;
246 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
247 if (!var && SSA_NAME_IDENTIFIER (name))
248 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
249 insert_decl_map (id, name, new_tree);
250 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
251 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
252 /* At least IPA points-to info can be directly transferred. */
253 if (id->src_cfun->gimple_df
254 && id->src_cfun->gimple_df->ipa_pta
255 && POINTER_TYPE_P (TREE_TYPE (name))
256 && (pi = SSA_NAME_PTR_INFO (name))
257 && !pi->pt.anything)
258 {
259 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
260 new_pi->pt = pi->pt;
261 }
262 return new_tree;
263 }
264
265   /* Do not set DEF_STMT yet as the statement is not copied yet. We do that
266 in copy_bb. */
267 new_tree = remap_decl (var, id);
268
269   /* We might've substituted a constant or another SSA_NAME for
270      the variable.
271
272      Replace the SSA name representing the RESULT_DECL by the variable
273      during inlining: this saves us from the need to introduce a PHI node
274      in the case the return value is only partly initialized. */
275 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
276 && (!SSA_NAME_VAR (name)
277 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
278 || !id->transform_return_to_modify))
279 {
280 struct ptr_info_def *pi;
281 new_tree = make_ssa_name (new_tree);
282 insert_decl_map (id, name, new_tree);
283 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
284 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
285 /* At least IPA points-to info can be directly transferred. */
286 if (id->src_cfun->gimple_df
287 && id->src_cfun->gimple_df->ipa_pta
288 && POINTER_TYPE_P (TREE_TYPE (name))
289 && (pi = SSA_NAME_PTR_INFO (name))
290 && !pi->pt.anything)
291 {
292 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
293 new_pi->pt = pi->pt;
294 }
295 if (SSA_NAME_IS_DEFAULT_DEF (name))
296 {
297           /* By inlining a function that has an uninitialized variable, we
298              might extend its lifetime (the variable might get reused). This
299              causes an ICE if we end up extending the lifetime of an SSA name
300              across an abnormal edge, and it also increases register pressure.
301
302              We simply initialize all uninitialized vars to 0, except for the
303              case where we are inlining into the very first BB. We could avoid
304              this for all BBs that are not inside strongly connected regions
305              of the CFG, but this is expensive to test. */
306 if (id->entry_bb
307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
308 && (!SSA_NAME_VAR (name)
309 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
310 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
311 0)->dest
312 || EDGE_COUNT (id->entry_bb->preds) != 1))
313 {
314 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
315 gimple *init_stmt;
316 tree zero = build_zero_cst (TREE_TYPE (new_tree));
317
318 init_stmt = gimple_build_assign (new_tree, zero);
319 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
320 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
321 }
322 else
323 {
324 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
325 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
326 }
327 }
328 }
329 else
330 insert_decl_map (id, name, new_tree);
331 return new_tree;
332 }
333
334 /* Remap DECL during the copying of the BLOCK tree for the function. */
335
336 tree
337 remap_decl (tree decl, copy_body_data *id)
338 {
339 tree *n;
340
341 /* We only remap local variables in the current function. */
342
343 /* See if we have remapped this declaration. */
344
345 n = id->decl_map->get (decl);
346
347 if (!n && processing_debug_stmt)
348 {
349 processing_debug_stmt = -1;
350 return decl;
351 }
352
353 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
354 necessary DECLs have already been remapped and we do not want to duplicate
355 a decl coming from outside of the sequence we are copying. */
356 if (!n
357 && id->prevent_decl_creation_for_types
358 && id->remapping_type_depth > 0
359 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
360 return decl;
361
362 /* If we didn't already have an equivalent for this declaration, create one
363 now. */
364 if (!n)
365 {
366 /* Make a copy of the variable or label. */
367 tree t = id->copy_decl (decl, id);
368
369 /* Remember it, so that if we encounter this local entity again
370 we can reuse this copy. Do this early because remap_type may
371 need this decl for TYPE_STUB_DECL. */
372 insert_decl_map (id, decl, t);
373
374 if (!DECL_P (t))
375 return t;
376
377 /* Remap types, if necessary. */
378 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
379 if (TREE_CODE (t) == TYPE_DECL)
380 {
381 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
382
383 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
384 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
385 is not set on the TYPE_DECL, for example in LTO mode. */
386 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
387 {
388 tree x = build_variant_type_copy (TREE_TYPE (t));
389 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
390 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
391 DECL_ORIGINAL_TYPE (t) = x;
392 }
393 }
394
395 /* Remap sizes as necessary. */
396 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
397 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
398
399      /* For fields, do likewise for the offset and qualifier. */
400 if (TREE_CODE (t) == FIELD_DECL)
401 {
402 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
403 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
404 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
405 }
406
407 return t;
408 }
409
410 if (id->do_not_unshare)
411 return *n;
412 else
413 return unshare_expr (*n);
414 }
415
416 static tree
417 remap_type_1 (tree type, copy_body_data *id)
418 {
419 tree new_tree, t;
420
421   /* We do need a copy. Build and register it now. If this is a pointer or
422 reference type, remap the designated type and make a new pointer or
423 reference type. */
424 if (TREE_CODE (type) == POINTER_TYPE)
425 {
426 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
427 TYPE_MODE (type),
428 TYPE_REF_CAN_ALIAS_ALL (type));
429 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
430 new_tree = build_type_attribute_qual_variant (new_tree,
431 TYPE_ATTRIBUTES (type),
432 TYPE_QUALS (type));
433 insert_decl_map (id, type, new_tree);
434 return new_tree;
435 }
436 else if (TREE_CODE (type) == REFERENCE_TYPE)
437 {
438 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
439 TYPE_MODE (type),
440 TYPE_REF_CAN_ALIAS_ALL (type));
441 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
442 new_tree = build_type_attribute_qual_variant (new_tree,
443 TYPE_ATTRIBUTES (type),
444 TYPE_QUALS (type));
445 insert_decl_map (id, type, new_tree);
446 return new_tree;
447 }
448 else
449 new_tree = copy_node (type);
450
451 insert_decl_map (id, type, new_tree);
452
453 /* This is a new type, not a copy of an old type. Need to reassociate
454 variants. We can handle everything except the main variant lazily. */
455 t = TYPE_MAIN_VARIANT (type);
456 if (type != t)
457 {
458 t = remap_type (t, id);
459 TYPE_MAIN_VARIANT (new_tree) = t;
460 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
461 TYPE_NEXT_VARIANT (t) = new_tree;
462 }
463 else
464 {
465 TYPE_MAIN_VARIANT (new_tree) = new_tree;
466 TYPE_NEXT_VARIANT (new_tree) = NULL;
467 }
468
469 if (TYPE_STUB_DECL (type))
470 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
471
472 /* Lazily create pointer and reference types. */
473 TYPE_POINTER_TO (new_tree) = NULL;
474 TYPE_REFERENCE_TO (new_tree) = NULL;
475
476   /* Copy all types that may contain references to local variables; be sure
477      to preserve sharing between the type and its main variant when possible. */
478 switch (TREE_CODE (new_tree))
479 {
480 case INTEGER_TYPE:
481 case REAL_TYPE:
482 case FIXED_POINT_TYPE:
483 case ENUMERAL_TYPE:
484 case BOOLEAN_TYPE:
485 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
486 {
487 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
488 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
489
490 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
491 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
492 }
493 else
494 {
495 t = TYPE_MIN_VALUE (new_tree);
496 if (t && TREE_CODE (t) != INTEGER_CST)
497 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
498
499 t = TYPE_MAX_VALUE (new_tree);
500 if (t && TREE_CODE (t) != INTEGER_CST)
501 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
502 }
503 return new_tree;
504
505 case FUNCTION_TYPE:
506 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
507 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
508 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
509 else
510 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
512 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
513 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
514 else
515 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
516 return new_tree;
517
518 case ARRAY_TYPE:
519 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
520 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
521 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
522 else
523 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
524
525 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
526 {
527 gcc_checking_assert (TYPE_DOMAIN (type)
528 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
529 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
530 }
531 else
532 {
533 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
534             /* For array bounds where we have decided not to copy over the bounds
535                variable because it isn't used in the OpenMP/OpenACC region, change
536                them to an uninitialized VAR_DECL temporary. */
537 if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
538 && id->adjust_array_error_bounds
539 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
540 {
541 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
542 DECL_ATTRIBUTES (v)
543 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
544 DECL_ATTRIBUTES (v));
545 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
546 }
547 }
548 break;
549
550 case RECORD_TYPE:
551 case UNION_TYPE:
552 case QUAL_UNION_TYPE:
553 if (TYPE_MAIN_VARIANT (type) != type
554 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
555 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
556 else
557 {
558 tree f, nf = NULL;
559
560 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
561 {
562 t = remap_decl (f, id);
563 DECL_CONTEXT (t) = new_tree;
564 DECL_CHAIN (t) = nf;
565 nf = t;
566 }
567 TYPE_FIELDS (new_tree) = nreverse (nf);
568 }
569 break;
570
571 case OFFSET_TYPE:
572 default:
573 /* Shouldn't have been thought variable sized. */
574 gcc_unreachable ();
575 }
576
577   /* All variants of the type share the same size, so use the already remapped data. */
578 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
579 {
580 tree s = TYPE_SIZE (type);
581 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
582 tree su = TYPE_SIZE_UNIT (type);
583 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
584 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
585 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
586 || s == mvs);
587 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
588 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
589 || su == mvsu);
590 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
591 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
592 }
593 else
594 {
595 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
596 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
597 }
598
599 return new_tree;
600 }
601
602 /* Helper function for remap_type_2, called through walk_tree. */
603
604 static tree
605 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
606 {
607 copy_body_data *id = (copy_body_data *) data;
608
609 if (TYPE_P (*tp))
610 *walk_subtrees = 0;
611
612 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
613 return *tp;
614
615 return NULL_TREE;
616 }
617
618 /* Return true if TYPE needs to be remapped because remap_decl on any
619 needed embedded decl returns something other than that decl. */
620
621 static bool
622 remap_type_2 (tree type, copy_body_data *id)
623 {
624 tree t;
625
626 #define RETURN_TRUE_IF_VAR(T) \
627 do \
628 { \
629 tree _t = (T); \
630 if (_t) \
631 { \
632 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
633 return true; \
634 if (!TYPE_SIZES_GIMPLIFIED (type) \
635 && walk_tree (&_t, remap_type_3, id, NULL)) \
636 return true; \
637 } \
638 } \
639 while (0)
640
641 switch (TREE_CODE (type))
642 {
643 case POINTER_TYPE:
644 case REFERENCE_TYPE:
645 case FUNCTION_TYPE:
646 case METHOD_TYPE:
647 return remap_type_2 (TREE_TYPE (type), id);
648
649 case INTEGER_TYPE:
650 case REAL_TYPE:
651 case FIXED_POINT_TYPE:
652 case ENUMERAL_TYPE:
653 case BOOLEAN_TYPE:
654 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
655 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
656 return false;
657
658 case ARRAY_TYPE:
659 if (remap_type_2 (TREE_TYPE (type), id)
660 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
661 return true;
662 break;
663
664 case RECORD_TYPE:
665 case UNION_TYPE:
666 case QUAL_UNION_TYPE:
667 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
668 if (TREE_CODE (t) == FIELD_DECL)
669 {
670 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
671 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
672 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
673 if (TREE_CODE (type) == QUAL_UNION_TYPE)
674 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
675 }
676 break;
677
678 default:
679 return false;
680 }
681
682 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
683 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
684 return false;
685 #undef RETURN_TRUE_IF_VAR
686 }
687
688 tree
689 remap_type (tree type, copy_body_data *id)
690 {
691 tree *node;
692 tree tmp;
693
694 if (type == NULL)
695 return type;
696
697 /* See if we have remapped this type. */
698 node = id->decl_map->get (type);
699 if (node)
700 return *node;
701
702 /* The type only needs remapping if it's variably modified. */
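  /* Illustrative example (added commentary, not from the original sources):
     a VLA type such as "int[n]" whose bound "n" is a local variable of the
     source function is variably modified and must be remapped so that the
     copied type refers to the copied bound; an ordinary type such as "int"
     or "int[10]" is not, and is simply mapped to itself below.  */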
703 if (! variably_modified_type_p (type, id->src_fn)
704 /* Don't remap if copy_decl method doesn't always return a new
705 decl and for all embedded decls returns the passed in decl. */
706 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
707 {
708 insert_decl_map (id, type, type);
709 return type;
710 }
711
712 id->remapping_type_depth++;
713 tmp = remap_type_1 (type, id);
714 id->remapping_type_depth--;
715
716 return tmp;
717 }
718
719 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
720
721 static bool
722 can_be_nonlocal (tree decl, copy_body_data *id)
723 {
724 /* We cannot duplicate function decls. */
725 if (TREE_CODE (decl) == FUNCTION_DECL)
726 return true;
727
728 /* Local static vars must be non-local or we get multiple declaration
729 problems. */
730 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
731 return true;
732
733 return false;
734 }
735
736 static tree
737 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
738 copy_body_data *id)
739 {
740 tree old_var;
741 tree new_decls = NULL_TREE;
742
743 /* Remap its variables. */
744 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
745 {
746 tree new_var;
747
748 if (can_be_nonlocal (old_var, id))
749 {
750 /* We need to add this variable to the local decls as otherwise
751 nothing else will do so. */
752 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
753 add_local_decl (cfun, old_var);
754 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
755 && !DECL_IGNORED_P (old_var)
756 && nonlocalized_list)
757 vec_safe_push (*nonlocalized_list, old_var);
758 continue;
759 }
760
761 /* Remap the variable. */
762 new_var = remap_decl (old_var, id);
763
764 /* If we didn't remap this variable, we can't mess with its
765 TREE_CHAIN. If we remapped this variable to the return slot, it's
766 already declared somewhere else, so don't declare it here. */
767
768 if (new_var == id->retvar)
769 ;
770 else if (!new_var)
771 {
772 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
773 && !DECL_IGNORED_P (old_var)
774 && nonlocalized_list)
775 vec_safe_push (*nonlocalized_list, old_var);
776 }
777 else
778 {
779 gcc_assert (DECL_P (new_var));
780 DECL_CHAIN (new_var) = new_decls;
781 new_decls = new_var;
782
783 /* Also copy value-expressions. */
784 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
785 {
786 tree tem = DECL_VALUE_EXPR (new_var);
787 bool old_regimplify = id->regimplify;
788 id->remapping_type_depth++;
789 walk_tree (&tem, copy_tree_body_r, id, NULL);
790 id->remapping_type_depth--;
791 id->regimplify = old_regimplify;
792 SET_DECL_VALUE_EXPR (new_var, tem);
793 }
794 }
795 }
796
797 return nreverse (new_decls);
798 }
799
800 /* Copy the BLOCK to contain remapped versions of the variables
801 therein. And hook the new block into the block-tree. */
802
803 static void
804 remap_block (tree *block, copy_body_data *id)
805 {
806 tree old_block;
807 tree new_block;
808
809 /* Make the new block. */
810 old_block = *block;
811 new_block = make_node (BLOCK);
812 TREE_USED (new_block) = TREE_USED (old_block);
813 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
814 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
815 BLOCK_NONLOCALIZED_VARS (new_block)
816 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
817 *block = new_block;
818
819 /* Remap its variables. */
820 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
821 &BLOCK_NONLOCALIZED_VARS (new_block),
822 id);
823
824 if (id->transform_lang_insert_block)
825 id->transform_lang_insert_block (new_block);
826
827 /* Remember the remapped block. */
828 insert_decl_map (id, old_block, new_block);
829 }
830
831 /* Copy the whole block tree and root it in id->block. */
832
833 static tree
834 remap_blocks (tree block, copy_body_data *id)
835 {
836 tree t;
837 tree new_tree = block;
838
839 if (!block)
840 return NULL;
841
842 remap_block (&new_tree, id);
843 gcc_assert (new_tree != block);
844 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
845 prepend_lexical_block (new_tree, remap_blocks (t, id));
846   /* Blocks are in arbitrary order, but to make things slightly prettier, do
847      not swap their order when producing a copy. */
848 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
849 return new_tree;
850 }
851
852 /* Remap the block tree rooted at BLOCK to nothing. */
853
854 static void
855 remap_blocks_to_null (tree block, copy_body_data *id)
856 {
857 tree t;
858 insert_decl_map (id, block, NULL_TREE);
859 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
860 remap_blocks_to_null (t, id);
861 }
862
863 /* Remap the location info pointed to by LOCUS. */
864
865 static location_t
866 remap_location (location_t locus, copy_body_data *id)
867 {
868 if (LOCATION_BLOCK (locus))
869 {
870 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
871 gcc_assert (n);
872 if (*n)
873 return set_block (locus, *n);
874 }
875
876 locus = LOCATION_LOCUS (locus);
877
878 if (locus != UNKNOWN_LOCATION && id->block)
879 return set_block (locus, id->block);
880
881 return locus;
882 }
883
884 static void
885 copy_statement_list (tree *tp)
886 {
887 tree_stmt_iterator oi, ni;
888 tree new_tree;
889
890 new_tree = alloc_stmt_list ();
891 ni = tsi_start (new_tree);
892 oi = tsi_start (*tp);
893 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
894 *tp = new_tree;
895
896 for (; !tsi_end_p (oi); tsi_next (&oi))
897 {
898 tree stmt = tsi_stmt (oi);
899 if (TREE_CODE (stmt) == STATEMENT_LIST)
900 /* This copy is not redundant; tsi_link_after will smash this
901 STATEMENT_LIST into the end of the one we're building, and we
902 don't want to do that with the original. */
903 copy_statement_list (&stmt);
904 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
905 }
906 }
907
908 static void
909 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
910 {
911 tree block = BIND_EXPR_BLOCK (*tp);
912 /* Copy (and replace) the statement. */
913 copy_tree_r (tp, walk_subtrees, NULL);
914 if (block)
915 {
916 remap_block (&block, id);
917 BIND_EXPR_BLOCK (*tp) = block;
918 }
919
920 if (BIND_EXPR_VARS (*tp))
921 /* This will remap a lot of the same decls again, but this should be
922 harmless. */
923 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
924 }
925
926
927 /* Create a new gimple_seq by remapping all the statements in BODY
928 using the inlining information in ID. */
929
930 static gimple_seq
931 remap_gimple_seq (gimple_seq body, copy_body_data *id)
932 {
933 gimple_stmt_iterator si;
934 gimple_seq new_body = NULL;
935
936 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
937 {
938 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
939 gimple_seq_add_seq (&new_body, new_stmts);
940 }
941
942 return new_body;
943 }
944
945
946 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
947 block using the mapping information in ID. */
948
949 static gimple *
950 copy_gimple_bind (gbind *stmt, copy_body_data *id)
951 {
952 gimple *new_bind;
953 tree new_block, new_vars;
954 gimple_seq body, new_body;
955
956 /* Copy the statement. Note that we purposely don't use copy_stmt
957 here because we need to remap statements as we copy. */
958 body = gimple_bind_body (stmt);
959 new_body = remap_gimple_seq (body, id);
960
961 new_block = gimple_bind_block (stmt);
962 if (new_block)
963 remap_block (&new_block, id);
964
965 /* This will remap a lot of the same decls again, but this should be
966 harmless. */
967 new_vars = gimple_bind_vars (stmt);
968 if (new_vars)
969 new_vars = remap_decls (new_vars, NULL, id);
970
971 new_bind = gimple_build_bind (new_vars, new_body, new_block);
972
973 return new_bind;
974 }
975
976 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
977
978 static bool
979 is_parm (tree decl)
980 {
981 if (TREE_CODE (decl) == SSA_NAME)
982 {
983 decl = SSA_NAME_VAR (decl);
984 if (!decl)
985 return false;
986 }
987
988 return (TREE_CODE (decl) == PARM_DECL);
989 }
990
991 /* Remap the dependence CLIQUE from the source to the destination function
992 as specified in ID. */
993
994 static unsigned short
995 remap_dependence_clique (copy_body_data *id, unsigned short clique)
996 {
997 if (clique == 0 || processing_debug_stmt)
998 return 0;
999 if (!id->dependence_map)
1000 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1001 bool existed;
1002 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1003 if (!existed)
1004 {
1005 /* Clique 1 is reserved for local ones set by PTA. */
1006 if (cfun->last_clique == 0)
1007 cfun->last_clique = 1;
1008 newc = ++cfun->last_clique;
1009 }
1010 return newc;
1011 }
1012
1013 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1014 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1015    WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
1016    recursing into the child nodes of *TP. */
1017
1018 static tree
1019 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1020 {
1021 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1022 copy_body_data *id = (copy_body_data *) wi_p->info;
1023 tree fn = id->src_fn;
1024
1025 /* For recursive invocations this is no longer the LHS itself. */
1026 bool is_lhs = wi_p->is_lhs;
1027 wi_p->is_lhs = false;
1028
1029 if (TREE_CODE (*tp) == SSA_NAME)
1030 {
1031 *tp = remap_ssa_name (*tp, id);
1032 *walk_subtrees = 0;
1033 if (is_lhs)
1034 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1035 return NULL;
1036 }
1037 else if (auto_var_in_fn_p (*tp, fn))
1038 {
1039 /* Local variables and labels need to be replaced by equivalent
1040 variables. We don't want to copy static variables; there's
1041 only one of those, no matter how many times we inline the
1042 containing function. Similarly for globals from an outer
1043 function. */
1044 tree new_decl;
1045
1046 /* Remap the declaration. */
1047 new_decl = remap_decl (*tp, id);
1048 gcc_assert (new_decl);
1049 /* Replace this variable with the copy. */
1050 STRIP_TYPE_NOPS (new_decl);
1051 /* ??? The C++ frontend uses void * pointer zero to initialize
1052 any other type. This confuses the middle-end type verification.
1053 As cloned bodies do not go through gimplification again the fixup
1054 there doesn't trigger. */
1055 if (TREE_CODE (new_decl) == INTEGER_CST
1056 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1057 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1058 *tp = new_decl;
1059 *walk_subtrees = 0;
1060 }
1061 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1062 gcc_unreachable ();
1063 else if (TREE_CODE (*tp) == SAVE_EXPR)
1064 gcc_unreachable ();
1065 else if (TREE_CODE (*tp) == LABEL_DECL
1066 && (!DECL_CONTEXT (*tp)
1067 || decl_function_context (*tp) == id->src_fn))
1068 /* These may need to be remapped for EH handling. */
1069 *tp = remap_decl (*tp, id);
1070 else if (TREE_CODE (*tp) == FIELD_DECL)
1071 {
1072 /* If the enclosing record type is variably_modified_type_p, the field
1073 has already been remapped. Otherwise, it need not be. */
1074 tree *n = id->decl_map->get (*tp);
1075 if (n)
1076 *tp = *n;
1077 *walk_subtrees = 0;
1078 }
1079 else if (TYPE_P (*tp))
1080 /* Types may need remapping as well. */
1081 *tp = remap_type (*tp, id);
1082 else if (CONSTANT_CLASS_P (*tp))
1083 {
1084 /* If this is a constant, we have to copy the node iff the type
1085 will be remapped. copy_tree_r will not copy a constant. */
1086 tree new_type = remap_type (TREE_TYPE (*tp), id);
1087
1088 if (new_type == TREE_TYPE (*tp))
1089 *walk_subtrees = 0;
1090
1091 else if (TREE_CODE (*tp) == INTEGER_CST)
1092 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1093 else
1094 {
1095 *tp = copy_node (*tp);
1096 TREE_TYPE (*tp) = new_type;
1097 }
1098 }
1099 else
1100 {
1101 /* Otherwise, just copy the node. Note that copy_tree_r already
1102 knows not to copy VAR_DECLs, etc., so this is safe. */
1103
1104 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1105 {
1106 /* We need to re-canonicalize MEM_REFs from inline substitutions
1107 that can happen when a pointer argument is an ADDR_EXPR.
1108 Recurse here manually to allow that. */
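             /* Illustrative example (added commentary, not from the original
                sources): when the caller passes &A for a pointer parameter P,
                the copied body may contain MEM[(int *) &A, 0] after the
                substitution; recursing into the pointer operand first lets
                the fold_build2 call below re-canonicalize such references.  */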
1109 tree ptr = TREE_OPERAND (*tp, 0);
1110 tree type = remap_type (TREE_TYPE (*tp), id);
1111 tree old = *tp;
1112 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1113 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1114 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1115 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1116 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1117 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1118 {
1119 MR_DEPENDENCE_CLIQUE (*tp)
1120 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1121 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1122 }
1123 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1124 remapped a parameter as the property might be valid only
1125 for the parameter itself. */
1126 if (TREE_THIS_NOTRAP (old)
1127 && (!is_parm (TREE_OPERAND (old, 0))
1128 || (!id->transform_parameter && is_parm (ptr))))
1129 TREE_THIS_NOTRAP (*tp) = 1;
1130 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1131 *walk_subtrees = 0;
1132 return NULL;
1133 }
1134
1135 /* Here is the "usual case". Copy this tree node, and then
1136 tweak some special cases. */
1137 copy_tree_r (tp, walk_subtrees, NULL);
1138
1139 if (TREE_CODE (*tp) != OMP_CLAUSE)
1140 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1141
1142 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1143 {
1144 /* The copied TARGET_EXPR has never been expanded, even if the
1145 original node was expanded already. */
1146 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1147 TREE_OPERAND (*tp, 3) = NULL_TREE;
1148 }
1149 else if (TREE_CODE (*tp) == ADDR_EXPR)
1150 {
1151            /* Variable substitution need not be simple. In particular,
1152               consider the MEM_REF substitution above. Make sure that
1153               TREE_CONSTANT and friends are up-to-date. */
1154 int invariant = is_gimple_min_invariant (*tp);
1155 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1156 recompute_tree_invariant_for_addr_expr (*tp);
1157
1158 /* If this used to be invariant, but is not any longer,
1159 then regimplification is probably needed. */
1160 if (invariant && !is_gimple_min_invariant (*tp))
1161 id->regimplify = true;
1162
1163 *walk_subtrees = 0;
1164 }
1165 }
1166
1167 /* Update the TREE_BLOCK for the cloned expr. */
1168 if (EXPR_P (*tp))
1169 {
1170 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1171 tree old_block = TREE_BLOCK (*tp);
1172 if (old_block)
1173 {
1174 tree *n;
1175 n = id->decl_map->get (TREE_BLOCK (*tp));
1176 if (n)
1177 new_block = *n;
1178 }
1179 TREE_SET_BLOCK (*tp, new_block);
1180 }
1181
1182 /* Keep iterating. */
1183 return NULL_TREE;
1184 }
1185
1186
1187 /* Called from copy_body_id via walk_tree. DATA is really a
1188 `copy_body_data *'. */
1189
1190 tree
1191 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1192 {
1193 copy_body_data *id = (copy_body_data *) data;
1194 tree fn = id->src_fn;
1195 tree new_block;
1196
1197 /* Begin by recognizing trees that we'll completely rewrite for the
1198 inlining context. Our output for these trees is completely
1199      different from our input (e.g. RETURN_EXPR is deleted, and morphs
1200 into an edge). Further down, we'll handle trees that get
1201 duplicated and/or tweaked. */
1202
1203 /* When requested, RETURN_EXPRs should be transformed to just the
1204 contained MODIFY_EXPR. The branch semantics of the return will
1205 be handled elsewhere by manipulating the CFG rather than a statement. */
1206 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1207 {
1208 tree assignment = TREE_OPERAND (*tp, 0);
1209
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If the "assignment" is just the result decl, the result
1213 decl has already been set (e.g. a recent "foo (&result_decl,
1214 ...)"); just toss the entire RETURN_EXPR. */
1215 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1216 {
1217 /* Replace the RETURN_EXPR with (a copy of) the
1218 MODIFY_EXPR hanging underneath. */
1219 *tp = copy_node (assignment);
1220 }
1221 else /* Else the RETURN_EXPR returns no value. */
1222 {
1223 *tp = NULL;
1224 return (tree) (void *)1;
1225 }
1226 }
1227 else if (TREE_CODE (*tp) == SSA_NAME)
1228 {
1229 *tp = remap_ssa_name (*tp, id);
1230 *walk_subtrees = 0;
1231 return NULL;
1232 }
1233
1234 /* Local variables and labels need to be replaced by equivalent
1235 variables. We don't want to copy static variables; there's only
1236 one of those, no matter how many times we inline the containing
1237 function. Similarly for globals from an outer function. */
1238 else if (auto_var_in_fn_p (*tp, fn))
1239 {
1240 tree new_decl;
1241
1242 /* Remap the declaration. */
1243 new_decl = remap_decl (*tp, id);
1244 gcc_assert (new_decl);
1245 /* Replace this variable with the copy. */
1246 STRIP_TYPE_NOPS (new_decl);
1247 *tp = new_decl;
1248 *walk_subtrees = 0;
1249 }
1250 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1251 copy_statement_list (tp);
1252 else if (TREE_CODE (*tp) == SAVE_EXPR
1253 || TREE_CODE (*tp) == TARGET_EXPR)
1254 remap_save_expr (tp, id->decl_map, walk_subtrees);
1255 else if (TREE_CODE (*tp) == LABEL_DECL
1256 && (! DECL_CONTEXT (*tp)
1257 || decl_function_context (*tp) == id->src_fn))
1258 /* These may need to be remapped for EH handling. */
1259 *tp = remap_decl (*tp, id);
1260 else if (TREE_CODE (*tp) == BIND_EXPR)
1261 copy_bind_expr (tp, walk_subtrees, id);
1262 /* Types may need remapping as well. */
1263 else if (TYPE_P (*tp))
1264 *tp = remap_type (*tp, id);
1265
1266 /* If this is a constant, we have to copy the node iff the type will be
1267 remapped. copy_tree_r will not copy a constant. */
1268 else if (CONSTANT_CLASS_P (*tp))
1269 {
1270 tree new_type = remap_type (TREE_TYPE (*tp), id);
1271
1272 if (new_type == TREE_TYPE (*tp))
1273 *walk_subtrees = 0;
1274
1275 else if (TREE_CODE (*tp) == INTEGER_CST)
1276 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1277 else
1278 {
1279 *tp = copy_node (*tp);
1280 TREE_TYPE (*tp) = new_type;
1281 }
1282 }
1283
1284 /* Otherwise, just copy the node. Note that copy_tree_r already
1285 knows not to copy VAR_DECLs, etc., so this is safe. */
1286 else
1287 {
1288 /* Here we handle trees that are not completely rewritten.
1289 First we detect some inlining-induced bogosities for
1290 discarding. */
1291 if (TREE_CODE (*tp) == MODIFY_EXPR
1292 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1293 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1294 {
1295 /* Some assignments VAR = VAR; don't generate any rtl code
1296 and thus don't count as variable modification. Avoid
1297 keeping bogosities like 0 = 0. */
1298 tree decl = TREE_OPERAND (*tp, 0), value;
1299 tree *n;
1300
1301 n = id->decl_map->get (decl);
1302 if (n)
1303 {
1304 value = *n;
1305 STRIP_TYPE_NOPS (value);
1306 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1307 {
1308 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1309 return copy_tree_body_r (tp, walk_subtrees, data);
1310 }
1311 }
1312 }
1313 else if (TREE_CODE (*tp) == INDIRECT_REF)
1314 {
1315 /* Get rid of *& from inline substitutions that can happen when a
1316 pointer argument is an ADDR_EXPR. */
1317 tree decl = TREE_OPERAND (*tp, 0);
1318 tree *n = id->decl_map->get (decl);
1319 if (n)
1320 {
1321 /* If we happen to get an ADDR_EXPR in n->value, strip
1322 it manually here as we'll eventually get ADDR_EXPRs
1323 which lie about their types pointed to. In this case
1324 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1325 but we absolutely rely on that. As fold_indirect_ref
1326 does other useful transformations, try that first, though. */
1327 tree type = TREE_TYPE (*tp);
1328 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1329 tree old = *tp;
1330 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1331 if (! *tp)
1332 {
1333 type = remap_type (type, id);
1334 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1335 {
1336 *tp
1337 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1338 /* ??? We should either assert here or build
1339 a VIEW_CONVERT_EXPR instead of blindly leaking
1340 incompatible types to our IL. */
1341 if (! *tp)
1342 *tp = TREE_OPERAND (ptr, 0);
1343 }
1344 else
1345 {
1346 *tp = build1 (INDIRECT_REF, type, ptr);
1347 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1348 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1349 TREE_READONLY (*tp) = TREE_READONLY (old);
1350 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1351 have remapped a parameter as the property might be
1352 valid only for the parameter itself. */
1353 if (TREE_THIS_NOTRAP (old)
1354 && (!is_parm (TREE_OPERAND (old, 0))
1355 || (!id->transform_parameter && is_parm (ptr))))
1356 TREE_THIS_NOTRAP (*tp) = 1;
1357 }
1358 }
1359 *walk_subtrees = 0;
1360 return NULL;
1361 }
1362 }
1363 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1364 {
1365 /* We need to re-canonicalize MEM_REFs from inline substitutions
1366 that can happen when a pointer argument is an ADDR_EXPR.
1367 Recurse here manually to allow that. */
1368 tree ptr = TREE_OPERAND (*tp, 0);
1369 tree type = remap_type (TREE_TYPE (*tp), id);
1370 tree old = *tp;
1371 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1372 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1373 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1374 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1375 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1376 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1377 {
1378 MR_DEPENDENCE_CLIQUE (*tp)
1379 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1380 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1381 }
1382 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1383 remapped a parameter as the property might be valid only
1384 for the parameter itself. */
1385 if (TREE_THIS_NOTRAP (old)
1386 && (!is_parm (TREE_OPERAND (old, 0))
1387 || (!id->transform_parameter && is_parm (ptr))))
1388 TREE_THIS_NOTRAP (*tp) = 1;
1389 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1390 *walk_subtrees = 0;
1391 return NULL;
1392 }
1393
1394 /* Here is the "usual case". Copy this tree node, and then
1395 tweak some special cases. */
1396 copy_tree_r (tp, walk_subtrees, NULL);
1397
1398 /* If EXPR has block defined, map it to newly constructed block.
1399            When inlining we want EXPRs without a block to appear in the
1400            block of the function call if we are not remapping a type. */
1401 if (EXPR_P (*tp))
1402 {
1403 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1404 if (TREE_BLOCK (*tp))
1405 {
1406 tree *n;
1407 n = id->decl_map->get (TREE_BLOCK (*tp));
1408 if (n)
1409 new_block = *n;
1410 }
1411 TREE_SET_BLOCK (*tp, new_block);
1412 }
1413
1414 if (TREE_CODE (*tp) != OMP_CLAUSE)
1415 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1416
1417 /* The copied TARGET_EXPR has never been expanded, even if the
1418 original node was expanded already. */
1419 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1420 {
1421 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1422 TREE_OPERAND (*tp, 3) = NULL_TREE;
1423 }
1424
1425         /* Variable substitution need not be simple. In particular,
1426            consider the INDIRECT_REF substitution above. Make sure that
1427            TREE_CONSTANT and friends are up-to-date. */
1428 else if (TREE_CODE (*tp) == ADDR_EXPR)
1429 {
1430 int invariant = is_gimple_min_invariant (*tp);
1431 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1432
1433 /* Handle the case where we substituted an INDIRECT_REF
1434 into the operand of the ADDR_EXPR. */
1435 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1436 && !id->do_not_fold)
1437 {
1438 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1439 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1440 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1441 *tp = t;
1442 }
1443 else
1444 recompute_tree_invariant_for_addr_expr (*tp);
1445
1446 /* If this used to be invariant, but is not any longer,
1447 then regimplification is probably needed. */
1448 if (invariant && !is_gimple_min_invariant (*tp))
1449 id->regimplify = true;
1450
1451 *walk_subtrees = 0;
1452 }
1453 }
1454
1455 /* Keep iterating. */
1456 return NULL_TREE;
1457 }
1458
1459 /* Helper for remap_gimple_stmt. Given an EH region number for the
1460 source function, map that to the duplicate EH region number in
1461 the destination function. */
1462
1463 static int
1464 remap_eh_region_nr (int old_nr, copy_body_data *id)
1465 {
1466 eh_region old_r, new_r;
1467
1468 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1469 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1470
1471 return new_r->index;
1472 }
1473
1474 /* Similar, but operate on INTEGER_CSTs. */
1475
1476 static tree
1477 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1478 {
1479 int old_nr, new_nr;
1480
1481 old_nr = tree_to_shwi (old_t_nr);
1482 new_nr = remap_eh_region_nr (old_nr, id);
1483
1484 return build_int_cst (integer_type_node, new_nr);
1485 }
1486
1487 /* Helper for copy_bb. Remap statement STMT using the inlining
1488 information in ID. Return the new statement copy. */
1489
1490 static gimple_seq
1491 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1492 {
1493 gimple *copy = NULL;
1494 struct walk_stmt_info wi;
1495 bool skip_first = false;
1496 gimple_seq stmts = NULL;
1497
1498 if (is_gimple_debug (stmt)
1499 && (gimple_debug_nonbind_marker_p (stmt)
1500 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1501 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1502 return NULL;
1503
1504 /* Begin by recognizing trees that we'll completely rewrite for the
1505 inlining context. Our output for these trees is completely
1506 different from our input (e.g. RETURN_EXPR is deleted and morphs
1507 into an edge). Further down, we'll handle trees that get
1508 duplicated and/or tweaked. */
1509
1510 /* When requested, GIMPLE_RETURN should be transformed to just the
1511 contained GIMPLE_ASSIGN. The branch semantics of the return will
1512 be handled elsewhere by manipulating the CFG rather than the
1513 statement. */
1514 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1515 {
1516 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1517
1518 /* If we're returning something, just turn that into an
1519 assignment to the equivalent of the original RESULT_DECL.
1520 If RETVAL is just the result decl, the result decl has
1521 already been set (e.g. a recent "foo (&result_decl, ...)");
1522 just toss the entire GIMPLE_RETURN. */
1523 if (retval
1524 && (TREE_CODE (retval) != RESULT_DECL
1525 && (TREE_CODE (retval) != SSA_NAME
1526 || ! SSA_NAME_VAR (retval)
1527 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1528 {
1529 copy = gimple_build_assign (id->do_not_unshare
1530 ? id->retvar : unshare_expr (id->retvar),
1531 retval);
1532 /* id->retvar is already substituted. Skip it on later remapping. */
1533 skip_first = true;
1534 }
1535 else
1536 return NULL;
1537 }
1538 else if (gimple_has_substatements (stmt))
1539 {
1540 gimple_seq s1, s2;
1541
1542 /* When cloning bodies from the C++ front end, we will be handed bodies
1543 in High GIMPLE form. Handle here all the High GIMPLE statements that
1544 have embedded statements. */
1545 switch (gimple_code (stmt))
1546 {
1547 case GIMPLE_BIND:
1548 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1549 break;
1550
1551 case GIMPLE_CATCH:
1552 {
1553 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1554 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1555 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1556 }
1557 break;
1558
1559 case GIMPLE_EH_FILTER:
1560 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1561 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1562 break;
1563
1564 case GIMPLE_TRY:
1565 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1566 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1567 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1568 break;
1569
1570 case GIMPLE_WITH_CLEANUP_EXPR:
1571 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1572 copy = gimple_build_wce (s1);
1573 break;
1574
1575 case GIMPLE_OMP_PARALLEL:
1576 {
1577 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1578 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1579 copy = gimple_build_omp_parallel
1580 (s1,
1581 gimple_omp_parallel_clauses (omp_par_stmt),
1582 gimple_omp_parallel_child_fn (omp_par_stmt),
1583 gimple_omp_parallel_data_arg (omp_par_stmt));
1584 }
1585 break;
1586
1587 case GIMPLE_OMP_TASK:
1588 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1589 copy = gimple_build_omp_task
1590 (s1,
1591 gimple_omp_task_clauses (stmt),
1592 gimple_omp_task_child_fn (stmt),
1593 gimple_omp_task_data_arg (stmt),
1594 gimple_omp_task_copy_fn (stmt),
1595 gimple_omp_task_arg_size (stmt),
1596 gimple_omp_task_arg_align (stmt));
1597 break;
1598
1599 case GIMPLE_OMP_FOR:
1600 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1601 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1602 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1603 gimple_omp_for_clauses (stmt),
1604 gimple_omp_for_collapse (stmt), s2);
1605 {
1606 size_t i;
1607 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1608 {
1609 gimple_omp_for_set_index (copy, i,
1610 gimple_omp_for_index (stmt, i));
1611 gimple_omp_for_set_initial (copy, i,
1612 gimple_omp_for_initial (stmt, i));
1613 gimple_omp_for_set_final (copy, i,
1614 gimple_omp_for_final (stmt, i));
1615 gimple_omp_for_set_incr (copy, i,
1616 gimple_omp_for_incr (stmt, i));
1617 gimple_omp_for_set_cond (copy, i,
1618 gimple_omp_for_cond (stmt, i));
1619 }
1620 }
1621 break;
1622
1623 case GIMPLE_OMP_MASTER:
1624 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1625 copy = gimple_build_omp_master (s1);
1626 break;
1627
1628 case GIMPLE_OMP_TASKGROUP:
1629 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1630 copy = gimple_build_omp_taskgroup
1631 (s1, gimple_omp_taskgroup_clauses (stmt));
1632 break;
1633
1634 case GIMPLE_OMP_ORDERED:
1635 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1636 copy = gimple_build_omp_ordered
1637 (s1,
1638 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1639 break;
1640
1641 case GIMPLE_OMP_SECTION:
1642 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1643 copy = gimple_build_omp_section (s1);
1644 break;
1645
1646 case GIMPLE_OMP_SECTIONS:
1647 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1648 copy = gimple_build_omp_sections
1649 (s1, gimple_omp_sections_clauses (stmt));
1650 break;
1651
1652 case GIMPLE_OMP_SINGLE:
1653 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1654 copy = gimple_build_omp_single
1655 (s1, gimple_omp_single_clauses (stmt));
1656 break;
1657
1658 case GIMPLE_OMP_TARGET:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_target
1661 (s1, gimple_omp_target_kind (stmt),
1662 gimple_omp_target_clauses (stmt));
1663 break;
1664
1665 case GIMPLE_OMP_TEAMS:
1666 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1667 copy = gimple_build_omp_teams
1668 (s1, gimple_omp_teams_clauses (stmt));
1669 break;
1670
1671 case GIMPLE_OMP_CRITICAL:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_critical (s1,
1674 gimple_omp_critical_name
1675 (as_a <gomp_critical *> (stmt)),
1676 gimple_omp_critical_clauses
1677 (as_a <gomp_critical *> (stmt)));
1678 break;
1679
1680 case GIMPLE_TRANSACTION:
1681 {
1682 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1683 gtransaction *new_trans_stmt;
1684 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1685 id);
1686 copy = new_trans_stmt = gimple_build_transaction (s1);
1687 gimple_transaction_set_subcode (new_trans_stmt,
1688 gimple_transaction_subcode (old_trans_stmt));
1689 gimple_transaction_set_label_norm (new_trans_stmt,
1690 gimple_transaction_label_norm (old_trans_stmt));
1691 gimple_transaction_set_label_uninst (new_trans_stmt,
1692 gimple_transaction_label_uninst (old_trans_stmt));
1693 gimple_transaction_set_label_over (new_trans_stmt,
1694 gimple_transaction_label_over (old_trans_stmt));
1695 }
1696 break;
1697
1698 default:
1699 gcc_unreachable ();
1700 }
1701 }
1702 else
1703 {
1704 if (gimple_assign_copy_p (stmt)
1705 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1706 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1707 {
1708 /* Here we handle statements that are not completely rewritten.
1709 First we detect some inlining-induced bogosities for
1710 discarding. */
1711
1712 /* Some assignments VAR = VAR; don't generate any rtl code
1713 and thus don't count as variable modification. Avoid
1714 keeping bogosities like 0 = 0. */
1715 tree decl = gimple_assign_lhs (stmt), value;
1716 tree *n;
1717
1718 n = id->decl_map->get (decl);
1719 if (n)
1720 {
1721 value = *n;
1722 STRIP_TYPE_NOPS (value);
1723 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1724 return NULL;
1725 }
1726 }
1727
1728 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1729 in a block that we aren't copying during tree_function_versioning,
1730 just drop the clobber stmt. */
1731 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1732 {
1733 tree lhs = gimple_assign_lhs (stmt);
1734 if (TREE_CODE (lhs) == MEM_REF
1735 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1736 {
1737 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1738 if (gimple_bb (def_stmt)
1739 && !bitmap_bit_p (id->blocks_to_copy,
1740 gimple_bb (def_stmt)->index))
1741 return NULL;
1742 }
1743 }
1744
1745 if (gimple_debug_bind_p (stmt))
1746 {
1747 gdebug *copy
1748 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1749 gimple_debug_bind_get_value (stmt),
1750 stmt);
1751 if (id->reset_location)
1752 gimple_set_location (copy, input_location);
1753 id->debug_stmts.safe_push (copy);
1754 gimple_seq_add_stmt (&stmts, copy);
1755 return stmts;
1756 }
1757 if (gimple_debug_source_bind_p (stmt))
1758 {
1759 gdebug *copy = gimple_build_debug_source_bind
1760 (gimple_debug_source_bind_get_var (stmt),
1761 gimple_debug_source_bind_get_value (stmt),
1762 stmt);
1763 if (id->reset_location)
1764 gimple_set_location (copy, input_location);
1765 id->debug_stmts.safe_push (copy);
1766 gimple_seq_add_stmt (&stmts, copy);
1767 return stmts;
1768 }
1769 if (gimple_debug_nonbind_marker_p (stmt))
1770 {
1771 /* If the inlined function has too many debug markers,
1772 don't copy them. */
1773 if (id->src_cfun->debug_marker_count
1774 > PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
1775 return stmts;
1776
1777 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1778 if (id->reset_location)
1779 gimple_set_location (copy, input_location);
1780 id->debug_stmts.safe_push (copy);
1781 gimple_seq_add_stmt (&stmts, copy);
1782 return stmts;
1783 }
1784
1785 /* Create a new deep copy of the statement. */
1786 copy = gimple_copy (stmt);
1787
1788 /* Clear flags that need revisiting. */
1789 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1790 {
1791 if (gimple_call_tail_p (call_stmt))
1792 gimple_call_set_tail (call_stmt, false);
1793 if (gimple_call_from_thunk_p (call_stmt))
1794 gimple_call_set_from_thunk (call_stmt, false);
1795 if (gimple_call_internal_p (call_stmt))
1796 switch (gimple_call_internal_fn (call_stmt))
1797 {
1798 case IFN_GOMP_SIMD_LANE:
1799 case IFN_GOMP_SIMD_VF:
1800 case IFN_GOMP_SIMD_LAST_LANE:
1801 case IFN_GOMP_SIMD_ORDERED_START:
1802 case IFN_GOMP_SIMD_ORDERED_END:
1803 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1804 break;
1805 default:
1806 break;
1807 }
1808 }
1809
1810 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1811 RESX and EH_DISPATCH. */
1812 if (id->eh_map)
1813 switch (gimple_code (copy))
1814 {
1815 case GIMPLE_CALL:
1816 {
1817 tree r, fndecl = gimple_call_fndecl (copy);
1818 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1819 switch (DECL_FUNCTION_CODE (fndecl))
1820 {
1821 case BUILT_IN_EH_COPY_VALUES:
1822 r = gimple_call_arg (copy, 1);
1823 r = remap_eh_region_tree_nr (r, id);
1824 gimple_call_set_arg (copy, 1, r);
1825 /* FALLTHRU */
1826
1827 case BUILT_IN_EH_POINTER:
1828 case BUILT_IN_EH_FILTER:
1829 r = gimple_call_arg (copy, 0);
1830 r = remap_eh_region_tree_nr (r, id);
1831 gimple_call_set_arg (copy, 0, r);
1832 break;
1833
1834 default:
1835 break;
1836 }
1837
1838             /* Reset alias info if we didn't take measures to keep it
1839                valid across inlining by setting DECL_PT_UID.  */
1840 if (!id->src_cfun->gimple_df
1841 || !id->src_cfun->gimple_df->ipa_pta)
1842 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1843 }
1844 break;
1845
1846 case GIMPLE_RESX:
1847 {
1848 gresx *resx_stmt = as_a <gresx *> (copy);
1849 int r = gimple_resx_region (resx_stmt);
1850 r = remap_eh_region_nr (r, id);
1851 gimple_resx_set_region (resx_stmt, r);
1852 }
1853 break;
1854
1855 case GIMPLE_EH_DISPATCH:
1856 {
1857 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1858 int r = gimple_eh_dispatch_region (eh_dispatch);
1859 r = remap_eh_region_nr (r, id);
1860 gimple_eh_dispatch_set_region (eh_dispatch, r);
1861 }
1862 break;
1863
1864 default:
1865 break;
1866 }
1867 }
1868
1869 /* If STMT has a block defined, map it to the newly constructed block. */
1870 if (tree block = gimple_block (copy))
1871 {
1872 tree *n;
1873 n = id->decl_map->get (block);
1874 gcc_assert (n);
1875 gimple_set_block (copy, *n);
1876 }
1877
1878 if (id->reset_location)
1879 gimple_set_location (copy, input_location);
1880
1881 /* Debug statements ought to be rebuilt and not copied. */
1882 gcc_checking_assert (!is_gimple_debug (copy));
1883
1884 /* Remap all the operands in COPY. */
1885 memset (&wi, 0, sizeof (wi));
1886 wi.info = id;
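      /* If SKIP_FIRST is set, the LHS was set up directly by the code above
         and must not be remapped; walk only the RHS operand.  */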
1887 if (skip_first)
1888 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1889 else
1890 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1891
1892 /* Clear the copied virtual operands. We are not remapping them here
1893 but are going to recreate them from scratch. */
1894 if (gimple_has_mem_ops (copy))
1895 {
1896 gimple_set_vdef (copy, NULL_TREE);
1897 gimple_set_vuse (copy, NULL_TREE);
1898 }
1899
1900 gimple_seq_add_stmt (&stmts, copy);
1901 return stmts;
1902 }
1903
1904
1905 /* Copy a basic block and scale its profile accordingly.  Edges will be
1906    taken care of later.  */
1907
1908 static basic_block
1909 copy_bb (copy_body_data *id, basic_block bb,
1910 profile_count num, profile_count den)
1911 {
1912 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1913 basic_block copy_basic_block;
1914 tree decl;
1915 basic_block prev;
1916
1917 profile_count::adjust_for_ipa_scaling (&num, &den);
1918
1919 /* Search for previous copied basic block. */
1920 prev = bb->prev_bb;
1921 while (!prev->aux)
1922 prev = prev->prev_bb;
1923
1924 /* create_basic_block() will append every new block to
1925 basic_block_info automatically. */
1926 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1927 copy_basic_block->count = bb->count.apply_scale (num, den);
1928
1929 copy_gsi = gsi_start_bb (copy_basic_block);
1930
1931 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1932 {
1933 gimple_seq stmts;
1934 gimple *stmt = gsi_stmt (gsi);
1935 gimple *orig_stmt = stmt;
1936 gimple_stmt_iterator stmts_gsi;
1937 bool stmt_added = false;
1938
1939 id->regimplify = false;
1940 stmts = remap_gimple_stmt (stmt, id);
1941
1942 if (gimple_seq_empty_p (stmts))
1943 continue;
1944
1945 seq_gsi = copy_gsi;
1946
1947 for (stmts_gsi = gsi_start (stmts);
1948 !gsi_end_p (stmts_gsi); )
1949 {
1950 stmt = gsi_stmt (stmts_gsi);
1951
1952 /* Advance iterator now before stmt is moved to seq_gsi. */
1953 gsi_next (&stmts_gsi);
1954
1955 if (gimple_nop_p (stmt))
1956 continue;
1957
1958 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1959 orig_stmt);
1960
1961 /* With return slot optimization we can end up with
1962 non-gimple (foo *)&this->m, fix that here. */
1963 if (is_gimple_assign (stmt)
1964 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1965 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1966 {
1967 tree new_rhs;
1968 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1969 gimple_assign_rhs1 (stmt),
1970 true, NULL, false,
1971 GSI_CONTINUE_LINKING);
1972 gimple_assign_set_rhs1 (stmt, new_rhs);
1973 id->regimplify = false;
1974 }
1975
1976 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1977
1978 if (id->regimplify)
1979 gimple_regimplify_operands (stmt, &seq_gsi);
1980
1981 stmt_added = true;
1982 }
1983
1984 if (!stmt_added)
1985 continue;
1986
1987       /* If copy_basic_block was empty at the start of this iteration,
1988          call gsi_start_bb again to get at the newly added statements.  */
1989 if (gsi_end_p (copy_gsi))
1990 copy_gsi = gsi_start_bb (copy_basic_block);
1991 else
1992 gsi_next (&copy_gsi);
1993
1994       /* Process the new statement.  The call to gimple_regimplify_operands
1995          possibly turned the statement into multiple statements; we
1996          need to process all of them.  */
1997 do
1998 {
1999 tree fn;
2000 gcall *call_stmt;
2001
2002 stmt = gsi_stmt (copy_gsi);
2003 call_stmt = dyn_cast <gcall *> (stmt);
2004 if (call_stmt
2005 && gimple_call_va_arg_pack_p (call_stmt)
2006 && id->call_stmt
2007 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2008 {
2009 /* __builtin_va_arg_pack () should be replaced by
2010 all arguments corresponding to ... in the caller. */
2011 tree p;
2012 gcall *new_call;
2013 vec<tree> argarray;
2014 size_t nargs = gimple_call_num_args (id->call_stmt);
2015 size_t n;
2016
2017 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2018 nargs--;
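                  /* NARGS is now the number of arguments passed in the
                     '...' part of the call being inlined.  */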
2019
2020 /* Create the new array of arguments. */
2021 n = nargs + gimple_call_num_args (call_stmt);
2022 argarray.create (n);
2023 argarray.safe_grow_cleared (n);
2024
2025 /* Copy all the arguments before '...' */
2026 memcpy (argarray.address (),
2027 gimple_call_arg_ptr (call_stmt, 0),
2028 gimple_call_num_args (call_stmt) * sizeof (tree));
2029
2030 /* Append the arguments passed in '...' */
2031 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2032 gimple_call_arg_ptr (id->call_stmt, 0)
2033 + (gimple_call_num_args (id->call_stmt) - nargs),
2034 nargs * sizeof (tree));
2035
2036 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2037 argarray);
2038
2039 argarray.release ();
2040
2041 /* Copy all GIMPLE_CALL flags, location and block, except
2042 GF_CALL_VA_ARG_PACK. */
2043 gimple_call_copy_flags (new_call, call_stmt);
2044 gimple_call_set_va_arg_pack (new_call, false);
2045 /* location includes block. */
2046 gimple_set_location (new_call, gimple_location (stmt));
2047 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2048
2049 gsi_replace (&copy_gsi, new_call, false);
2050 stmt = new_call;
2051 }
2052 else if (call_stmt
2053 && id->call_stmt
2054 && (decl = gimple_call_fndecl (stmt))
2055 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2056 {
2057 /* __builtin_va_arg_pack_len () should be replaced by
2058 the number of anonymous arguments. */
2059 size_t nargs = gimple_call_num_args (id->call_stmt);
2060 tree count, p;
2061 gimple *new_stmt;
2062
2063 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2064 nargs--;
2065
2066 if (!gimple_call_lhs (stmt))
2067 {
2068 /* Drop unused calls. */
2069 gsi_remove (&copy_gsi, false);
2070 continue;
2071 }
2072 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2073 {
2074 count = build_int_cst (integer_type_node, nargs);
2075 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2076 gsi_replace (&copy_gsi, new_stmt, false);
2077 stmt = new_stmt;
2078 }
2079 else if (nargs != 0)
2080 {
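                      /* The call being inlined forwards its own va_arg_pack,
                         so the final count is not known here.  Keep the
                         __builtin_va_arg_pack_len () call, give it a new
                         temporary result, and add NARGS for the anonymous
                         arguments that are already explicit.  */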
2081 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2082 count = build_int_cst (integer_type_node, nargs);
2083 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2084 PLUS_EXPR, newlhs, count);
2085 gimple_call_set_lhs (stmt, newlhs);
2086 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2087 }
2088 }
2089 else if (call_stmt
2090 && id->call_stmt
2091 && gimple_call_internal_p (stmt)
2092 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2093 {
2094 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2095 gsi_remove (&copy_gsi, false);
2096 continue;
2097 }
2098
2099 /* Statements produced by inlining can be unfolded, especially
2100 when we constant propagated some operands. We can't fold
2101 them right now for two reasons:
2102                1) folding requires SSA_NAME_DEF_STMTs to be correct
2103                2) we can't change function calls to builtins.
2104                So we just mark the statement for later folding.  We mark
2105                all new statements, instead of just the statements that changed
2106                by some nontrivial substitution, so even statements made
2107 foldable indirectly are updated. If this turns out to be
2108 expensive, copy_body can be told to watch for nontrivial
2109 changes. */
2110 if (id->statements_to_fold)
2111 id->statements_to_fold->add (stmt);
2112
2113 /* We're duplicating a CALL_EXPR. Find any corresponding
2114 callgraph edges and update or duplicate them. */
2115 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2116 {
2117 struct cgraph_edge *edge;
2118
2119 switch (id->transform_call_graph_edges)
2120 {
2121 case CB_CGE_DUPLICATE:
2122 edge = id->src_node->get_edge (orig_stmt);
2123 if (edge)
2124 {
2125 struct cgraph_edge *old_edge = edge;
2126 profile_count old_cnt = edge->count;
2127 edge = edge->clone (id->dst_node, call_stmt,
2128 gimple_uid (stmt),
2129 num, den,
2130 true);
2131
2132 /* Speculative calls consist of two edges - direct and
2133 indirect. Duplicate the whole thing and distribute
2134 frequencies accordingly. */
2135 if (edge->speculative)
2136 {
2137 struct cgraph_edge *direct, *indirect;
2138 struct ipa_ref *ref;
2139
2140 gcc_assert (!edge->indirect_unknown_callee);
2141 old_edge->speculative_call_info (direct, indirect, ref);
2142
2143 profile_count indir_cnt = indirect->count;
2144 indirect = indirect->clone (id->dst_node, call_stmt,
2145 gimple_uid (stmt),
2146 num, den,
2147 true);
2148
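                      /* Split the count of the copied block between the
                         direct and indirect edge in the same proportion as
                         the original edge counts.  */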
2149 profile_probability prob
2150 = indir_cnt.probability_in (old_cnt + indir_cnt);
2151 indirect->count
2152 = copy_basic_block->count.apply_probability (prob);
2153 edge->count = copy_basic_block->count - indirect->count;
2154 id->dst_node->clone_reference (ref, stmt);
2155 }
2156 else
2157 edge->count = copy_basic_block->count;
2158 }
2159 break;
2160
2161 case CB_CGE_MOVE_CLONES:
2162 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2163 call_stmt);
2164 edge = id->dst_node->get_edge (stmt);
2165 break;
2166
2167 case CB_CGE_MOVE:
2168 edge = id->dst_node->get_edge (orig_stmt);
2169 if (edge)
2170 edge->set_call_stmt (call_stmt);
2171 break;
2172
2173 default:
2174 gcc_unreachable ();
2175 }
2176
2177               /* Constant propagation on arguments done during inlining
2178                  may create a new direct call.  Produce an edge for it.  */
2179 if ((!edge
2180 || (edge->indirect_inlining_edge
2181 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2182 && id->dst_node->definition
2183 && (fn = gimple_call_fndecl (stmt)) != NULL)
2184 {
2185 struct cgraph_node *dest = cgraph_node::get_create (fn);
2186
2187                 /* We have a missing edge in the callgraph.  This can happen
2188                    when previous inlining turned an indirect call into a
2189                    direct call by constant-propagating arguments or we are
2190                    producing a dead clone (for further cloning).  In all
2191 other cases we hit a bug (incorrect node sharing is the
2192 most common reason for missing edges). */
2193 gcc_assert (!dest->definition
2194 || dest->address_taken
2195 || !id->src_node->definition
2196 || !id->dst_node->definition);
2197 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2198 id->dst_node->create_edge_including_clones
2199 (dest, orig_stmt, call_stmt, bb->count,
2200 CIF_ORIGINALLY_INDIRECT_CALL);
2201 else
2202 id->dst_node->create_edge (dest, call_stmt,
2203 bb->count)->inline_failed
2204 = CIF_ORIGINALLY_INDIRECT_CALL;
2205 if (dump_file)
2206 {
2207 fprintf (dump_file, "Created new direct edge to %s\n",
2208 dest->name ());
2209 }
2210 }
2211
2212 notice_special_calls (as_a <gcall *> (stmt));
2213 }
2214
2215 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2216 id->eh_map, id->eh_lp_nr);
2217
2218 gsi_next (&copy_gsi);
2219 }
2220 while (!gsi_end_p (copy_gsi));
2221
2222 copy_gsi = gsi_last_bb (copy_basic_block);
2223 }
2224
2225 return copy_basic_block;
2226 }
2227
2228 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
2229    form is quite easy, since the dominator relationship for the old basic
2230    blocks does not change.
2231 
2232    There is however an exception where inlining might change the dominator
2233    relation across EH edges from basic blocks within the inlined function
2234    to landing pads in the function we inline into.
2235 
2236    The function fills in PHI_RESULTs of such PHI nodes if they refer
2237    to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
2238    PHI nodes for renaming.  For non-gimple regs, renaming is safe: the
2239    EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2240    set, and this means that there will be no overlapping live ranges
2241    for the underlying symbol.
2242 
2243    This might change in the future if we allow redirecting of EH edges and
2244    we might then want to change the way the CFG is built pre-inlining to
2245    include all the possible edges. */
2246 static void
2247 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2248 bool can_throw, bool nonlocal_goto)
2249 {
2250 edge e;
2251 edge_iterator ei;
2252
2253 FOR_EACH_EDGE (e, ei, bb->succs)
2254 if (!e->dest->aux
2255 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2256 {
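        /* The destination of E was not copied, so it belongs to the
           function we are inlining into; fix up its PHI arguments for
           the new abnormal or EH edge.  */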
2257 gphi *phi;
2258 gphi_iterator si;
2259
2260 if (!nonlocal_goto)
2261 gcc_assert (e->flags & EDGE_EH);
2262
2263 if (!can_throw)
2264 gcc_assert (!(e->flags & EDGE_EH));
2265
2266 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2267 {
2268 edge re;
2269
2270 phi = si.phi ();
2271
2272 /* For abnormal goto/call edges the receiver can be the
2273 ENTRY_BLOCK. Do not assert this cannot happen. */
2274
2275 gcc_assert ((e->flags & EDGE_EH)
2276 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2277
2278 re = find_edge (ret_bb, e->dest);
2279 gcc_checking_assert (re);
2280 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2281 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2282
2283 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2284 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2285 }
2286 }
2287 }
2288
2289 /* Insert clobbers for automatic variables of the inlined function
2290    ID->src_fn at the start of basic block ID->eh_landing_pad_dest.  */
2291
2292 static void
2293 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2294 {
2295 tree var;
2296 basic_block bb = id->eh_landing_pad_dest;
2297 live_vars_map *vars = NULL;
2298 unsigned int cnt = 0;
2299 unsigned int i;
2300 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2301 if (VAR_P (var)
2302 && !DECL_HARD_REGISTER (var)
2303 && !TREE_THIS_VOLATILE (var)
2304 && !DECL_HAS_VALUE_EXPR_P (var)
2305 && !is_gimple_reg (var)
2306 && auto_var_in_fn_p (var, id->src_fn)
2307 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2308 {
2309 tree *t = id->decl_map->get (var);
2310 if (!t)
2311 continue;
2312 tree new_var = *t;
2313 if (VAR_P (new_var)
2314 && !DECL_HARD_REGISTER (new_var)
2315 && !TREE_THIS_VOLATILE (new_var)
2316 && !DECL_HAS_VALUE_EXPR_P (new_var)
2317 && !is_gimple_reg (new_var)
2318 && auto_var_in_fn_p (new_var, id->dst_fn))
2319 {
2320 if (vars == NULL)
2321 vars = new live_vars_map;
2322 vars->put (DECL_UID (var), cnt++);
2323 }
2324 }
2325 if (vars == NULL)
2326 return;
2327
2328 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
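  /* Emit a clobber at the start of the landing pad for each candidate
     variable that is live in a block from which an EH edge enters the
     landing pad.  */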
2329 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2330 if (VAR_P (var))
2331 {
2332 edge e;
2333 edge_iterator ei;
2334 bool needed = false;
2335 unsigned int *v = vars->get (DECL_UID (var));
2336 if (v == NULL)
2337 continue;
2338 FOR_EACH_EDGE (e, ei, bb->preds)
2339 if ((e->flags & EDGE_EH) != 0
2340 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2341 {
2342 basic_block src_bb = (basic_block) e->src->aux;
2343
2344 if (bitmap_bit_p (&live[src_bb->index], *v))
2345 {
2346 needed = true;
2347 break;
2348 }
2349 }
2350 if (needed)
2351 {
2352 tree new_var = *id->decl_map->get (var);
2353 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2354 tree clobber = build_clobber (TREE_TYPE (new_var));
2355 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2356 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2357 }
2358 }
2359 destroy_live_vars (live);
2360 delete vars;
2361 }
2362
2363 /* Copy edges from BB into its copy constructed earlier, scaling the profile
2364    accordingly.  Assume the aux pointers point to the copies of each BB.
2365    Return true if any debug stmts are left after a statement that must end
2366    the basic block.  */
2367
2368 static bool
2369 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2370 basic_block ret_bb, basic_block abnormal_goto_dest,
2371 copy_body_data *id)
2372 {
2373 basic_block new_bb = (basic_block) bb->aux;
2374 edge_iterator ei;
2375 edge old_edge;
2376 gimple_stmt_iterator si;
2377 bool need_debug_cleanup = false;
2378
2379 /* Use the indices from the original blocks to create edges for the
2380 new ones. */
2381 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2382 if (!(old_edge->flags & EDGE_EH))
2383 {
2384 edge new_edge;
2385 int flags = old_edge->flags;
2386 location_t locus = old_edge->goto_locus;
2387
2388 /* Return edges do get a FALLTHRU flag when they get inlined. */
2389 if (old_edge->dest->index == EXIT_BLOCK
2390 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2391 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2392 flags |= EDGE_FALLTHRU;
2393
2394 new_edge
2395 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2396 new_edge->probability = old_edge->probability;
2397 if (!id->reset_location)
2398 new_edge->goto_locus = remap_location (locus, id);
2399 }
2400
2401 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2402 return false;
2403
2404   /* When doing function splitting, we must decrease the count of the return
2405      block, which was previously reachable from blocks we did not copy.  */
2406 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2407 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2408 if (old_edge->src->index != ENTRY_BLOCK
2409 && !old_edge->src->aux)
2410 new_bb->count -= old_edge->count ().apply_scale (num, den);
2411
2412 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2413 {
2414 gimple *copy_stmt;
2415 bool can_throw, nonlocal_goto;
2416
2417 copy_stmt = gsi_stmt (si);
2418 if (!is_gimple_debug (copy_stmt))
2419 update_stmt (copy_stmt);
2420
2421 /* Do this before the possible split_block. */
2422 gsi_next (&si);
2423
2424 /* If this tree could throw an exception, there are two
2425 cases where we need to add abnormal edge(s): the
2426 tree wasn't in a region and there is a "current
2427 region" in the caller; or the original tree had
2428 EH edges. In both cases split the block after the tree,
2429 and add abnormal edge(s) as needed; we need both
2430 those from the callee and the caller.
2431 We check whether the copy can throw, because the const
2432 propagation can change an INDIRECT_REF which throws
2433 into a COMPONENT_REF which doesn't. If the copy
2434 can throw, the original could also throw. */
2435 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2436 nonlocal_goto
2437 = (stmt_can_make_abnormal_goto (copy_stmt)
2438 && !computed_goto_p (copy_stmt));
2439
2440 if (can_throw || nonlocal_goto)
2441 {
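          /* If only debug stmts follow COPY_STMT, they end up after a
             statement that must end the basic block; remember that so the
             caller can move them to the successor blocks later.  */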
2442 if (!gsi_end_p (si))
2443 {
2444 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2445 gsi_next (&si);
2446 if (gsi_end_p (si))
2447 need_debug_cleanup = true;
2448 }
2449 if (!gsi_end_p (si))
2450 /* Note that bb's predecessor edges aren't necessarily
2451 right at this point; split_block doesn't care. */
2452 {
2453 edge e = split_block (new_bb, copy_stmt);
2454
2455 new_bb = e->dest;
2456 new_bb->aux = e->src->aux;
2457 si = gsi_start_bb (new_bb);
2458 }
2459 }
2460
2461 bool update_probs = false;
2462
2463 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2464 {
2465 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2466 update_probs = true;
2467 }
2468 else if (can_throw)
2469 {
2470 make_eh_edges (copy_stmt);
2471 update_probs = true;
2472 }
2473
2474 /* EH edges may not match old edges. Copy as much as possible. */
2475 if (update_probs)
2476 {
2477 edge e;
2478 edge_iterator ei;
2479 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2480
2481 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2482 if ((old_edge->flags & EDGE_EH)
2483 && (e = find_edge (copy_stmt_bb,
2484 (basic_block) old_edge->dest->aux))
2485 && (e->flags & EDGE_EH))
2486 e->probability = old_edge->probability;
2487
2488 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2489 if (e->flags & EDGE_EH)
2490 {
2491 if (!e->probability.initialized_p ())
2492 e->probability = profile_probability::never ();
2493 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2494 {
2495 if (id->eh_landing_pad_dest == NULL)
2496 id->eh_landing_pad_dest = e->dest;
2497 else
2498 gcc_assert (id->eh_landing_pad_dest == e->dest);
2499 }
2500 }
2501 }
2502
2503
2504       /* If the call we inline cannot make an abnormal goto, do not add
2505          additional abnormal edges but only retain those already present
2506          in the original function body.  */
2507 if (abnormal_goto_dest == NULL)
2508 nonlocal_goto = false;
2509 if (nonlocal_goto)
2510 {
2511 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2512
2513 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2514 nonlocal_goto = false;
2515 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2516 in OpenMP regions which aren't allowed to be left abnormally.
2517 So, no need to add abnormal edge in that case. */
2518 else if (is_gimple_call (copy_stmt)
2519 && gimple_call_internal_p (copy_stmt)
2520 && (gimple_call_internal_fn (copy_stmt)
2521 == IFN_ABNORMAL_DISPATCHER)
2522 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2523 nonlocal_goto = false;
2524 else
2525 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2526 EDGE_ABNORMAL);
2527 }
2528
2529 if ((can_throw || nonlocal_goto)
2530 && gimple_in_ssa_p (cfun))
2531 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2532 can_throw, nonlocal_goto);
2533 }
2534 return need_debug_cleanup;
2535 }
2536
2537 /* Copy the PHIs.  All blocks and edges have been copied, some blocks
2538    were possibly split and new outgoing EH edges inserted.
2539    BB points to the block of the original function and AUX pointers link
2540    the original and newly copied blocks.  */
2541
2542 static void
2543 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2544 {
2545 basic_block const new_bb = (basic_block) bb->aux;
2546 edge_iterator ei;
2547 gphi *phi;
2548 gphi_iterator si;
2549 edge new_edge;
2550 bool inserted = false;
2551
2552 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2553 {
2554 tree res, new_res;
2555 gphi *new_phi;
2556
2557 phi = si.phi ();
2558 res = PHI_RESULT (phi);
2559 new_res = res;
2560 if (!virtual_operand_p (res))
2561 {
2562 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2563 if (EDGE_COUNT (new_bb->preds) == 0)
2564 {
2565 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2566 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2567 }
2568 else
2569 {
2570 new_phi = create_phi_node (new_res, new_bb);
2571 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2572 {
2573 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2574 bb);
2575 tree arg;
2576 tree new_arg;
2577 edge_iterator ei2;
2578 location_t locus;
2579
2580                   /* When doing partial cloning, we allow PHIs on the entry
2581                      block as long as all the arguments are the same.  Use
2582                      any incoming edge to find the argument to copy.  */
2583 if (!old_edge)
2584 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2585 if (!old_edge->src->aux)
2586 break;
2587
2588 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2589 new_arg = arg;
2590 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2591 gcc_assert (new_arg);
2592 /* With return slot optimization we can end up with
2593 non-gimple (foo *)&this->m, fix that here. */
2594 if (TREE_CODE (new_arg) != SSA_NAME
2595 && TREE_CODE (new_arg) != FUNCTION_DECL
2596 && !is_gimple_val (new_arg))
2597 {
2598 gimple_seq stmts = NULL;
2599 new_arg = force_gimple_operand (new_arg, &stmts, true,
2600 NULL);
2601 gsi_insert_seq_on_edge (new_edge, stmts);
2602 inserted = true;
2603 }
2604 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2605 if (id->reset_location)
2606 locus = input_location;
2607 else
2608 locus = remap_location (locus, id);
2609 add_phi_arg (new_phi, new_arg, new_edge, locus);
2610 }
2611 }
2612 }
2613 }
2614
2615 /* Commit the delayed edge insertions. */
2616 if (inserted)
2617 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2618 gsi_commit_one_edge_insert (new_edge, NULL);
2619 }
2620
2621
2622 /* Wrapper for remap_decl so it can be used as a callback. */
2623
2624 static tree
2625 remap_decl_1 (tree decl, void *data)
2626 {
2627 return remap_decl (decl, (copy_body_data *) data);
2628 }
2629
2630 /* Build the struct function and associated data structures for the new clone
2631    NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  This function
2632    changes cfun to the function of new_fndecl (and current_function_decl too).  */
2633
2634 static void
2635 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2636 {
2637 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2638
2639 if (!DECL_ARGUMENTS (new_fndecl))
2640 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2641 if (!DECL_RESULT (new_fndecl))
2642 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2643
2644 /* Register specific tree functions. */
2645 gimple_register_cfg_hooks ();
2646
2647 /* Get clean struct function. */
2648 push_struct_function (new_fndecl);
2649
2650 /* We will rebuild these, so just sanity check that they are empty. */
2651 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2652 gcc_assert (cfun->local_decls == NULL);
2653 gcc_assert (cfun->cfg == NULL);
2654 gcc_assert (cfun->decl == new_fndecl);
2655
2656 /* Copy items we preserve during cloning. */
2657 cfun->static_chain_decl = src_cfun->static_chain_decl;
2658 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2659 cfun->function_end_locus = src_cfun->function_end_locus;
2660 cfun->curr_properties = src_cfun->curr_properties;
2661 cfun->last_verified = src_cfun->last_verified;
2662 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2663 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2664 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2665 cfun->calls_eh_return = src_cfun->calls_eh_return;
2666 cfun->stdarg = src_cfun->stdarg;
2667 cfun->after_inlining = src_cfun->after_inlining;
2668 cfun->can_throw_non_call_exceptions
2669 = src_cfun->can_throw_non_call_exceptions;
2670 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2671 cfun->returns_struct = src_cfun->returns_struct;
2672 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2673
2674 init_empty_tree_cfg ();
2675
2676 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2677
2678 profile_count num = count;
2679 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2680 profile_count::adjust_for_ipa_scaling (&num, &den);
2681
2682 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2683 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2684 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2685 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2686 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2687 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2688 if (src_cfun->eh)
2689 init_eh_for_function ();
2690
2691 if (src_cfun->gimple_df)
2692 {
2693 init_tree_ssa (cfun);
2694 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2695 if (cfun->gimple_df->in_ssa_p)
2696 init_ssa_operands (cfun);
2697 }
2698 }
2699
2700 /* Helper function for copy_cfg_body.  Move debug stmts from the end
2701    of NEW_BB to the beginning of successor basic blocks when needed.  If the
2702    successor has multiple predecessors, reset the debug stmt values,
2703    otherwise keep them.  */
2704
2705 static void
2706 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2707 {
2708 edge e;
2709 edge_iterator ei;
2710 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2711
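  /* Nothing needs to be done unless the last non-debug stmt must end the
     block and debug stmts follow it.  */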
2712 if (gsi_end_p (si)
2713 || gsi_one_before_end_p (si)
2714 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2715 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2716 return;
2717
2718 FOR_EACH_EDGE (e, ei, new_bb->succs)
2719 {
2720 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2721 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2722 while (is_gimple_debug (gsi_stmt (ssi)))
2723 {
2724 gimple *stmt = gsi_stmt (ssi);
2725 gdebug *new_stmt;
2726 tree var;
2727 tree value;
2728
2729 /* For the last edge move the debug stmts instead of copying
2730 them. */
2731 if (ei_one_before_end_p (ei))
2732 {
2733 si = ssi;
2734 gsi_prev (&ssi);
2735 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2736 {
2737 gimple_debug_bind_reset_value (stmt);
2738 gimple_set_location (stmt, UNKNOWN_LOCATION);
2739 }
2740 gsi_remove (&si, false);
2741 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2742 continue;
2743 }
2744
2745 if (gimple_debug_bind_p (stmt))
2746 {
2747 var = gimple_debug_bind_get_var (stmt);
2748 if (single_pred_p (e->dest))
2749 {
2750 value = gimple_debug_bind_get_value (stmt);
2751 value = unshare_expr (value);
2752 new_stmt = gimple_build_debug_bind (var, value, stmt);
2753 }
2754 else
2755 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2756 }
2757 else if (gimple_debug_source_bind_p (stmt))
2758 {
2759 var = gimple_debug_source_bind_get_var (stmt);
2760 value = gimple_debug_source_bind_get_value (stmt);
2761 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2762 }
2763 else if (gimple_debug_nonbind_marker_p (stmt))
2764 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2765 else
2766 gcc_unreachable ();
2767 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2768 id->debug_stmts.safe_push (new_stmt);
2769 gsi_prev (&ssi);
2770 }
2771 }
2772 }
2773
2774 /* Make a copy of the sub-loops of SRC_PARENT and place them
2775    as sub-loops of DEST_PARENT.  */
2776
2777 static void
2778 copy_loops (copy_body_data *id,
2779 struct loop *dest_parent, struct loop *src_parent)
2780 {
2781 struct loop *src_loop = src_parent->inner;
2782 while (src_loop)
2783 {
2784 if (!id->blocks_to_copy
2785 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2786 {
2787 struct loop *dest_loop = alloc_loop ();
2788
2789 /* Assign the new loop its header and latch and associate
2790 those with the new loop. */
2791 dest_loop->header = (basic_block)src_loop->header->aux;
2792 dest_loop->header->loop_father = dest_loop;
2793 if (src_loop->latch != NULL)
2794 {
2795 dest_loop->latch = (basic_block)src_loop->latch->aux;
2796 dest_loop->latch->loop_father = dest_loop;
2797 }
2798
2799 /* Copy loop meta-data. */
2800 copy_loop_info (src_loop, dest_loop);
2801 if (dest_loop->unroll)
2802 cfun->has_unroll = true;
2803 if (dest_loop->force_vectorize)
2804 cfun->has_force_vectorize_loops = true;
2805 if (id->src_cfun->last_clique != 0)
2806 dest_loop->owned_clique
2807 = remap_dependence_clique (id,
2808 src_loop->owned_clique
2809 ? src_loop->owned_clique : 1);
2810
2811 /* Finally place it into the loop array and the loop tree. */
2812 place_new_loop (cfun, dest_loop);
2813 flow_loop_tree_node_add (dest_parent, dest_loop);
2814
2815 if (src_loop->simduid)
2816 {
2817 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2818 cfun->has_simduid_loops = true;
2819 }
2820
2821 /* Recurse. */
2822 copy_loops (id, dest_loop, src_loop);
2823 }
2824 src_loop = src_loop->next;
2825 }
2826 }
2827
2828 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2829
2830 void
2831 redirect_all_calls (copy_body_data * id, basic_block bb)
2832 {
2833 gimple_stmt_iterator si;
2834 gimple *last = last_stmt (bb);
2835 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2836 {
2837 gimple *stmt = gsi_stmt (si);
2838 if (is_gimple_call (stmt))
2839 {
2840 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2841 if (edge)
2842 {
2843 edge->redirect_call_stmt_to_callee ();
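              /* If the redirected call was the last stmt of the block and
                 can no longer throw, remove its now dead EH edges.  */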
2844 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2845 gimple_purge_dead_eh_edges (bb);
2846 }
2847 }
2848 }
2849 }
2850
2851 /* Make a copy of the body of FN so that it can be inserted inline in
2852 another function. Walks FN via CFG, returns new fndecl. */
2853
2854 static tree
2855 copy_cfg_body (copy_body_data * id,
2856 basic_block entry_block_map, basic_block exit_block_map,
2857 basic_block new_entry)
2858 {
2859 tree callee_fndecl = id->src_fn;
2860 /* Original cfun for the callee, doesn't change. */
2861 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2862 struct function *cfun_to_copy;
2863 basic_block bb;
2864 tree new_fndecl = NULL;
2865 bool need_debug_cleanup = false;
2866 int last;
2867 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2868 profile_count num = entry_block_map->count;
2869
2870 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2871
2872 /* Register specific tree functions. */
2873 gimple_register_cfg_hooks ();
2874
2875   /* If we are inlining just a region of the function, make sure to connect
2876      the new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun).  Since the new entry can
2877      be part of a loop, we must compute the frequency and probability of
2878      ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2879      probabilities of the edges incoming from the nonduplicated region.  */
2880 if (new_entry)
2881 {
2882 edge e;
2883 edge_iterator ei;
2884 den = profile_count::zero ();
2885
2886 FOR_EACH_EDGE (e, ei, new_entry->preds)
2887 if (!e->src->aux)
2888 den += e->count ();
2889 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
2890 }
2891
2892 profile_count::adjust_for_ipa_scaling (&num, &den);
2893
2894 /* Must have a CFG here at this point. */
2895 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2896 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2897
2898
2899 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2900 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2901 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2902 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2903
2904 /* Duplicate any exception-handling regions. */
2905 if (cfun->eh)
2906 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2907 remap_decl_1, id);
2908
2909   /* Use aux pointers to map the original blocks to their copies.  */
2910 FOR_EACH_BB_FN (bb, cfun_to_copy)
2911 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2912 {
2913 basic_block new_bb = copy_bb (id, bb, num, den);
2914 bb->aux = new_bb;
2915 new_bb->aux = bb;
2916 new_bb->loop_father = entry_block_map->loop_father;
2917 }
2918
2919 last = last_basic_block_for_fn (cfun);
2920
2921 /* Now that we've duplicated the blocks, duplicate their edges. */
2922 basic_block abnormal_goto_dest = NULL;
2923 if (id->call_stmt
2924 && stmt_can_make_abnormal_goto (id->call_stmt))
2925 {
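      /* The inlined call has abnormal successor edges only if it ends its
         block; in that case find the abnormal dispatcher those edges lead
         to, so copied statements that can make an abnormal goto get an
         edge to it.  */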
2926 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2927
2928 bb = gimple_bb (id->call_stmt);
2929 gsi_next (&gsi);
2930 if (gsi_end_p (gsi))
2931 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2932 }
2933 FOR_ALL_BB_FN (bb, cfun_to_copy)
2934 if (!id->blocks_to_copy
2935 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2936 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
2937 abnormal_goto_dest, id);
2938
2939 if (id->eh_landing_pad_dest)
2940 {
2941 add_clobbers_to_eh_landing_pad (id);
2942 id->eh_landing_pad_dest = NULL;
2943 }
2944
2945 if (new_entry)
2946 {
2947 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
2948 EDGE_FALLTHRU);
2949 e->probability = profile_probability::always ();
2950 }
2951
2952 /* Duplicate the loop tree, if available and wanted. */
2953 if (loops_for_fn (src_cfun) != NULL
2954 && current_loops != NULL)
2955 {
2956 copy_loops (id, entry_block_map->loop_father,
2957 get_loop (src_cfun, 0));
2958 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2959 loops_state_set (LOOPS_NEED_FIXUP);
2960 }
2961
2962 /* If the loop tree in the source function needed fixup, mark the
2963 destination loop tree for fixup, too. */
2964 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2965 loops_state_set (LOOPS_NEED_FIXUP);
2966
2967 if (gimple_in_ssa_p (cfun))
2968 FOR_ALL_BB_FN (bb, cfun_to_copy)
2969 if (!id->blocks_to_copy
2970 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2971 copy_phis_for_bb (bb, id);
2972
2973 FOR_ALL_BB_FN (bb, cfun_to_copy)
2974 if (bb->aux)
2975 {
2976 if (need_debug_cleanup
2977 && bb->index != ENTRY_BLOCK
2978 && bb->index != EXIT_BLOCK)
2979 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2980 /* Update call edge destinations. This cannot be done before loop
2981 info is updated, because we may split basic blocks. */
2982 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2983 && bb->index != ENTRY_BLOCK
2984 && bb->index != EXIT_BLOCK)
2985 redirect_all_calls (id, (basic_block)bb->aux);
2986 ((basic_block)bb->aux)->aux = NULL;
2987 bb->aux = NULL;
2988 }
2989
2990   /* Zero out the AUX fields of blocks newly created during EH edge
2991      insertion.  */
2992 for (; last < last_basic_block_for_fn (cfun); last++)
2993 {
2994 if (need_debug_cleanup)
2995 maybe_move_debug_stmts_to_successors (id,
2996 BASIC_BLOCK_FOR_FN (cfun, last));
2997 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2998 /* Update call edge destinations. This cannot be done before loop
2999 info is updated, because we may split basic blocks. */
3000 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3001 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3002 }
3003 entry_block_map->aux = NULL;
3004 exit_block_map->aux = NULL;
3005
3006 if (id->eh_map)
3007 {
3008 delete id->eh_map;
3009 id->eh_map = NULL;
3010 }
3011 if (id->dependence_map)
3012 {
3013 delete id->dependence_map;
3014 id->dependence_map = NULL;
3015 }
3016
3017 return new_fndecl;
3018 }
3019
3020 /* Copy the debug STMT using ID. We deal with these statements in a
3021 special way: if any variable in their VALUE expression wasn't
3022 remapped yet, we won't remap it, because that would get decl uids
3023 out of sync, causing codegen differences between -g and -g0. If
3024 this arises, we drop the VALUE expression altogether. */
3025
3026 static void
3027 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3028 {
3029 tree t, *n;
3030 struct walk_stmt_info wi;
3031
3032 if (tree block = gimple_block (stmt))
3033 {
3034 n = id->decl_map->get (block);
3035 gimple_set_block (stmt, n ? *n : id->block);
3036 }
3037
3038 if (gimple_debug_nonbind_marker_p (stmt))
3039 return;
3040
3041 /* Remap all the operands in COPY. */
3042 memset (&wi, 0, sizeof (wi));
3043 wi.info = id;
3044
3045 processing_debug_stmt = 1;
3046
3047 if (gimple_debug_source_bind_p (stmt))
3048 t = gimple_debug_source_bind_get_var (stmt);
3049 else if (gimple_debug_bind_p (stmt))
3050 t = gimple_debug_bind_get_var (stmt);
3051 else
3052 gcc_unreachable ();
3053
3054 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3055 && (n = id->debug_map->get (t)))
3056 {
3057 gcc_assert (VAR_P (*n));
3058 t = *n;
3059 }
3060 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3061 /* T is a non-localized variable. */;
3062 else
3063 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3064
3065 if (gimple_debug_bind_p (stmt))
3066 {
3067 gimple_debug_bind_set_var (stmt, t);
3068
3069 if (gimple_debug_bind_has_value_p (stmt))
3070 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3071 remap_gimple_op_r, &wi, NULL);
3072
3073 /* Punt if any decl couldn't be remapped. */
3074 if (processing_debug_stmt < 0)
3075 gimple_debug_bind_reset_value (stmt);
3076 }
3077 else if (gimple_debug_source_bind_p (stmt))
3078 {
3079 gimple_debug_source_bind_set_var (stmt, t);
3080       /* When inlining and the source bind refers to one of the optimized
3081          away parameters, change the source bind into a normal debug bind
3082          referring to the corresponding DEBUG_EXPR_DECL that should have
3083          been bound before the call stmt.  */
3084 t = gimple_debug_source_bind_get_value (stmt);
3085 if (t != NULL_TREE
3086 && TREE_CODE (t) == PARM_DECL
3087 && id->call_stmt)
3088 {
3089 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3090 unsigned int i;
3091 if (debug_args != NULL)
3092 {
3093 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3094 if ((**debug_args)[i] == DECL_ORIGIN (t)
3095 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3096 {
3097 t = (**debug_args)[i + 1];
3098 stmt->subcode = GIMPLE_DEBUG_BIND;
3099 gimple_debug_bind_set_value (stmt, t);
3100 break;
3101 }
3102 }
3103 }
3104 if (gimple_debug_source_bind_p (stmt))
3105 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3106 remap_gimple_op_r, &wi, NULL);
3107 }
3108
3109 processing_debug_stmt = 0;
3110
3111 update_stmt (stmt);
3112 }
3113
3114 /* Process deferred debug stmts. In order to give values better odds
3115 of being successfully remapped, we delay the processing of debug
3116 stmts until all other stmts that might require remapping are
3117 processed. */
3118
3119 static void
3120 copy_debug_stmts (copy_body_data *id)
3121 {
3122 size_t i;
3123 gdebug *stmt;
3124
3125 if (!id->debug_stmts.exists ())
3126 return;
3127
3128 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3129 copy_debug_stmt (stmt, id);
3130
3131 id->debug_stmts.release ();
3132 }
3133
3134 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3135 another function. */
3136
3137 static tree
3138 copy_tree_body (copy_body_data *id)
3139 {
3140 tree fndecl = id->src_fn;
3141 tree body = DECL_SAVED_TREE (fndecl);
3142
3143 walk_tree (&body, copy_tree_body_r, id, NULL);
3144
3145 return body;
3146 }
3147
3148 /* Make a copy of the body of FN so that it can be inserted inline in
3149 another function. */
3150
3151 static tree
3152 copy_body (copy_body_data *id,
3153 basic_block entry_block_map, basic_block exit_block_map,
3154 basic_block new_entry)
3155 {
3156 tree fndecl = id->src_fn;
3157 tree body;
3158
3159 /* If this body has a CFG, walk CFG and copy. */
3160 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3161 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3162 new_entry);
3163 copy_debug_stmts (id);
3164
3165 return body;
3166 }
3167
3168 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3169 defined in function FN, or of a data member thereof. */
3170
3171 static bool
3172 self_inlining_addr_expr (tree value, tree fn)
3173 {
3174 tree var;
3175
3176 if (TREE_CODE (value) != ADDR_EXPR)
3177 return false;
3178
3179 var = get_base_address (TREE_OPERAND (value, 0));
3180
3181 return var && auto_var_in_fn_p (var, fn);
3182 }
3183
3184 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3185 lexical block and line number information from base_stmt, if given,
3186 or from the last stmt of the block otherwise. */
3187
3188 static gimple *
3189 insert_init_debug_bind (copy_body_data *id,
3190 basic_block bb, tree var, tree value,
3191 gimple *base_stmt)
3192 {
3193 gimple *note;
3194 gimple_stmt_iterator gsi;
3195 tree tracked_var;
3196
3197 if (!gimple_in_ssa_p (id->src_cfun))
3198 return NULL;
3199
3200 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3201 return NULL;
3202
3203 tracked_var = target_for_debug_bind (var);
3204 if (!tracked_var)
3205 return NULL;
3206
3207 if (bb)
3208 {
3209 gsi = gsi_last_bb (bb);
3210 if (!base_stmt && !gsi_end_p (gsi))
3211 base_stmt = gsi_stmt (gsi);
3212 }
3213
3214 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3215
3216 if (bb)
3217 {
3218 if (!gsi_end_p (gsi))
3219 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3220 else
3221 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3222 }
3223
3224 return note;
3225 }
3226
3227 static void
3228 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3229 {
3230   /* If VAR represents a zero-sized variable, the assignment statement
3231      may result in no gimple statements.  */
3232 if (init_stmt)
3233 {
3234 gimple_stmt_iterator si = gsi_last_bb (bb);
3235
3236 /* We can end up with init statements that store to a non-register
3237 from a rhs with a conversion. Handle that here by forcing the
3238 rhs into a temporary. gimple_regimplify_operands is not
3239 prepared to do this for us. */
3240 if (!is_gimple_debug (init_stmt)
3241 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3242 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3243 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3244 {
3245 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3246 gimple_expr_type (init_stmt),
3247 gimple_assign_rhs1 (init_stmt));
3248 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3249 GSI_NEW_STMT);
3250 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3251 gimple_assign_set_rhs1 (init_stmt, rhs);
3252 }
3253 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3254 gimple_regimplify_operands (init_stmt, &si);
3255
3256 if (!is_gimple_debug (init_stmt))
3257 {
3258 tree def = gimple_assign_lhs (init_stmt);
3259 insert_init_debug_bind (id, bb, def, def, init_stmt);
3260 }
3261 }
3262 }
3263
3264 /* Initialize parameter P with VALUE.  If needed, produce an init statement
3265    at the end of BB.  When BB is NULL, we return the init statement to be
3266    output later.  */
3267 static gimple *
3268 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3269 basic_block bb, tree *vars)
3270 {
3271 gimple *init_stmt = NULL;
3272 tree var;
3273 tree rhs = value;
3274 tree def = (gimple_in_ssa_p (cfun)
3275 ? ssa_default_def (id->src_cfun, p) : NULL);
3276
3277 if (value
3278 && value != error_mark_node
3279 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3280 {
3281 /* If we can match up types by promotion/demotion do so. */
3282 if (fold_convertible_p (TREE_TYPE (p), value))
3283 rhs = fold_convert (TREE_TYPE (p), value);
3284 else
3285 {
3286 /* ??? For valid programs we should not end up here.
3287 Still if we end up with truly mismatched types here, fall back
3288 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3289 GIMPLE to the following passes. */
3290 if (!is_gimple_reg_type (TREE_TYPE (value))
3291 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3292 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3293 else
3294 rhs = build_zero_cst (TREE_TYPE (p));
3295 }
3296 }
3297
3298 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3299 here since the type of this decl must be visible to the calling
3300 function. */
3301 var = copy_decl_to_var (p, id);
3302
3303 /* Declare this new variable. */
3304 DECL_CHAIN (var) = *vars;
3305 *vars = var;
3306
3307 /* Make gimplifier happy about this variable. */
3308 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3309
3310   /* If the parameter is never assigned to and has no SSA_NAMEs created,
3311      we would not need to create a new variable here at all, if it
3312      weren't for debug info.  Still, we can just use the argument
3313      value.  */
3314 if (TREE_READONLY (p)
3315 && !TREE_ADDRESSABLE (p)
3316 && value && !TREE_SIDE_EFFECTS (value)
3317 && !def)
3318 {
3319       /* We may produce non-gimple trees by adding NOPs or introduce
3320          invalid sharing when the operand is not really constant.
3321          It is not a big deal to prohibit constant propagation here as
3322          we will constant propagate in the DOM1 pass anyway.  */
3323 if (is_gimple_min_invariant (value)
3324 && useless_type_conversion_p (TREE_TYPE (p),
3325 TREE_TYPE (value))
3326 /* We have to be very careful about ADDR_EXPR. Make sure
3327 the base variable isn't a local variable of the inlined
3328 function, e.g., when doing recursive inlining, direct or
3329 mutually-recursive or whatever, which is why we don't
3330 just test whether fn == current_function_decl. */
3331 && ! self_inlining_addr_expr (value, fn))
3332 {
3333 insert_decl_map (id, p, value);
3334 insert_debug_decl_map (id, p, var);
3335 return insert_init_debug_bind (id, bb, var, value, NULL);
3336 }
3337 }
3338
3339 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3340 that way, when the PARM_DECL is encountered, it will be
3341 automatically replaced by the VAR_DECL. */
3342 insert_decl_map (id, p, var);
3343
3344 /* Even if P was TREE_READONLY, the new VAR should not be.
3345 In the original code, we would have constructed a
3346 temporary, and then the function body would have never
3347 changed the value of P. However, now, we will be
3348 constructing VAR directly. The constructor body may
3349 change its value multiple times as it is being
3350 constructed. Therefore, it must not be TREE_READONLY;
3351      the back-end assumes that a TREE_READONLY variable is
3352      assigned to only once.  */
3353 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3354 TREE_READONLY (var) = 0;
3355
3356   /* If there is no setup required and we are in SSA, take the easy route
3357      replacing all SSA names representing the function parameter by the
3358      SSA name passed to the function.
3359 
3360      We need to construct a map for the variable anyway, as it might be
3361      used in different SSA names when the parameter is set in the function.
3362 
3363      Do the replacement at -O0 for const arguments replaced by a constant.
3364      This is important for builtin_constant_p and other constructs requiring
3365      a constant argument to be visible in the inlined function body.  */
3366 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3367 && (optimize
3368 || (TREE_READONLY (p)
3369 && is_gimple_min_invariant (rhs)))
3370 && (TREE_CODE (rhs) == SSA_NAME
3371 || is_gimple_min_invariant (rhs))
3372 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3373 {
3374 insert_decl_map (id, def, rhs);
3375 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3376 }
3377
3378   /* If the value of the argument is never used, don't bother initializing
3379      it.  */
3380 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3381 {
3382 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3383 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3384 }
3385
3386 /* Initialize this VAR_DECL from the equivalent argument. Convert
3387 the argument to the proper type in case it was promoted. */
3388 if (value)
3389 {
3390 if (rhs == error_mark_node)
3391 {
3392 insert_decl_map (id, p, var);
3393 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3394 }
3395
3396 STRIP_USELESS_TYPE_CONVERSION (rhs);
3397
3398 /* If we are in SSA form properly remap the default definition
3399 or assign to a dummy SSA name if the parameter is unused and
3400 we are not optimizing. */
3401 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3402 {
3403 if (def)
3404 {
3405 def = remap_ssa_name (def, id);
3406 init_stmt = gimple_build_assign (def, rhs);
3407 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3408 set_ssa_default_def (cfun, var, NULL);
3409 }
3410 else if (!optimize)
3411 {
3412 def = make_ssa_name (var);
3413 init_stmt = gimple_build_assign (def, rhs);
3414 }
3415 }
3416 else
3417 init_stmt = gimple_build_assign (var, rhs);
3418
3419 if (bb && init_stmt)
3420 insert_init_stmt (id, bb, init_stmt);
3421 }
3422 return init_stmt;
3423 }
3424
3425 /* Generate code to initialize the parameters of the function at the
3426 top of the stack in ID from the GIMPLE_CALL STMT. */
3427
3428 static void
3429 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3430 tree fn, basic_block bb)
3431 {
3432 tree parms;
3433 size_t i;
3434 tree p;
3435 tree vars = NULL_TREE;
3436 tree static_chain = gimple_call_chain (stmt);
3437
3438 /* Figure out what the parameters are. */
3439 parms = DECL_ARGUMENTS (fn);
3440
3441 /* Loop through the parameter declarations, replacing each with an
3442 equivalent VAR_DECL, appropriately initialized. */
3443 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3444 {
3445 tree val;
3446 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3447 setup_one_parameter (id, p, val, fn, bb, &vars);
3448 }
3449   /* After remapping the parameters, remap their types.  This has to be done
3450      in a second loop over all parameters to appropriately remap
3451      variable-sized arrays when the size is specified in a
3452      parameter following the array.  */
3453 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3454 {
3455 tree *varp = id->decl_map->get (p);
3456 if (varp && VAR_P (*varp))
3457 {
3458 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3459 ? ssa_default_def (id->src_cfun, p) : NULL);
3460 tree var = *varp;
3461 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3462 /* Also remap the default definition if it was remapped
3463 to the default definition of the parameter replacement
3464 by the parameter setup. */
3465 if (def)
3466 {
3467 tree *defp = id->decl_map->get (def);
3468 if (defp
3469 && TREE_CODE (*defp) == SSA_NAME
3470 && SSA_NAME_VAR (*defp) == var)
3471 TREE_TYPE (*defp) = TREE_TYPE (var);
3472 }
3473 }
3474 }
3475
3476 /* Initialize the static chain. */
3477 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3478 gcc_assert (fn != current_function_decl);
3479 if (p)
3480 {
3481 /* No static chain? Seems like a bug in tree-nested.c. */
3482 gcc_assert (static_chain);
3483
3484 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3485 }
3486
3487 declare_inline_vars (id->block, vars);
3488 }
3489
3490
3491 /* Declare a return variable to replace the RESULT_DECL for the
3492 function we are calling. An appropriate DECL_STMT is returned.
3493 The USE_STMT is filled to contain a use of the declaration to
3494 indicate the return value of the function.
3495
3496    RETURN_SLOT, if non-null, is the place where to store the result.  It
3497 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3498 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3499
3500 The return value is a (possibly null) value that holds the result
3501 as seen by the caller. */
3502
3503 static tree
3504 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3505 basic_block entry_bb)
3506 {
3507 tree callee = id->src_fn;
3508 tree result = DECL_RESULT (callee);
3509 tree callee_type = TREE_TYPE (result);
3510 tree caller_type;
3511 tree var, use;
3512
3513 /* Handle type-mismatches in the function declaration return type
3514 vs. the call expression. */
3515 if (modify_dest)
3516 caller_type = TREE_TYPE (modify_dest);
3517 else
3518 caller_type = TREE_TYPE (TREE_TYPE (callee));
3519
3520 /* We don't need to do anything for functions that don't return anything. */
3521 if (VOID_TYPE_P (callee_type))
3522 return NULL_TREE;
3523
3524 /* If there was a return slot, then the return value is the
3525 dereferenced address of that object. */
3526 if (return_slot)
3527 {
3528 /* The front end shouldn't have used both return_slot and
3529 a modify expression. */
3530 gcc_assert (!modify_dest);
3531 if (DECL_BY_REFERENCE (result))
3532 {
3533 tree return_slot_addr = build_fold_addr_expr (return_slot);
3534 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3535
3536 /* We are going to construct *&return_slot and we can't do that
3537 for variables believed to be not addressable.
3538
3539 FIXME: This check can possibly trigger, because values returned
3540 via return slot optimization are not believed to have their address
3541 taken by alias analysis. */
3542 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3543 var = return_slot_addr;
3544 }
3545 else
3546 {
3547 var = return_slot;
3548 gcc_assert (TREE_CODE (var) != SSA_NAME);
3549 if (TREE_ADDRESSABLE (result))
3550 mark_addressable (var);
3551 }
3552 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3553 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3554 && !DECL_GIMPLE_REG_P (result)
3555 && DECL_P (var))
3556 DECL_GIMPLE_REG_P (var) = 0;
3557 use = NULL;
3558 goto done;
3559 }
3560
3561 /* All types requiring non-trivial constructors should have been handled. */
3562 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3563
3564 /* Attempt to avoid creating a new temporary variable. */
3565 if (modify_dest
3566 && TREE_CODE (modify_dest) != SSA_NAME)
3567 {
3568 bool use_it = false;
3569
3570 /* We can't use MODIFY_DEST if there's type promotion involved. */
3571 if (!useless_type_conversion_p (callee_type, caller_type))
3572 use_it = false;
3573
3574 /* ??? If we're assigning to a variable sized type, then we must
3575 reuse the destination variable, because we've no good way to
3576 create variable sized temporaries at this point. */
3577 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3578 use_it = true;
3579
3580 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3581 reuse it as the result of the call directly. Don't do this if
3582 it would promote MODIFY_DEST to addressable. */
3583 else if (TREE_ADDRESSABLE (result))
3584 use_it = false;
3585 else
3586 {
3587 tree base_m = get_base_address (modify_dest);
3588
3589 /* If the base isn't a decl, then it's a pointer, and we don't
3590 know where that's going to go. */
3591 if (!DECL_P (base_m))
3592 use_it = false;
3593 else if (is_global_var (base_m))
3594 use_it = false;
3595 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3596 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3597 && !DECL_GIMPLE_REG_P (result)
3598 && DECL_GIMPLE_REG_P (base_m))
3599 use_it = false;
3600 else if (!TREE_ADDRESSABLE (base_m))
3601 use_it = true;
3602 }
3603
3604 if (use_it)
3605 {
3606 var = modify_dest;
3607 use = NULL;
3608 goto done;
3609 }
3610 }
3611
3612 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3613
3614 var = copy_result_decl_to_var (result, id);
3615 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3616
3617 /* Do not have the rest of GCC warn about this variable as it should
3618 not be visible to the user. */
3619 TREE_NO_WARNING (var) = 1;
3620
3621 declare_inline_vars (id->block, var);
3622
3623 /* Build the use expr. If the return type of the function was
3624 promoted, convert it back to the expected type. */
3625 use = var;
3626 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3627 {
3628 /* If we can match up types by promotion/demotion do so. */
3629 if (fold_convertible_p (caller_type, var))
3630 use = fold_convert (caller_type, var);
3631 else
3632 {
3633 /* ??? For valid programs we should not end up here.
3634 Still if we end up with truly mismatched types here, fall back
3635 to using a MEM_REF to not leak invalid GIMPLE to the following
3636 passes. */
3637 /* Prevent var from being written into SSA form. */
3638 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3639 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3640 DECL_GIMPLE_REG_P (var) = false;
3641 else if (is_gimple_reg_type (TREE_TYPE (var)))
3642 TREE_ADDRESSABLE (var) = true;
3643 use = fold_build2 (MEM_REF, caller_type,
3644 build_fold_addr_expr (var),
3645 build_int_cst (ptr_type_node, 0));
3646 }
3647 }
3648
3649 STRIP_USELESS_TYPE_CONVERSION (use);
3650
3651 if (DECL_BY_REFERENCE (result))
3652 {
3653 TREE_ADDRESSABLE (var) = 1;
3654 var = build_fold_addr_expr (var);
3655 }
3656
3657 done:
3658 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3659 way, when the RESULT_DECL is encountered, it will be
3660 automatically replaced by the VAR_DECL.
3661
3662 When returning by reference, ensure that RESULT_DECL remaps to
3663 gimple_val. */
3664 if (DECL_BY_REFERENCE (result)
3665 && !is_gimple_val (var))
3666 {
3667 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3668 insert_decl_map (id, result, temp);
3669 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3670 its default_def SSA_NAME. */
3671 if (gimple_in_ssa_p (id->src_cfun)
3672 && is_gimple_reg (result))
3673 {
3674 temp = make_ssa_name (temp);
3675 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3676 }
3677 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3678 }
3679 else
3680 insert_decl_map (id, result, var);
3681
3682 /* Remember this so we can ignore it in remap_decls. */
3683 id->retvar = var;
3684 return use;
3685 }
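/* Illustrative sketch, not part of the original sources: for a caller
   statement  a = foo ();  MODIFY_DEST is "a".  When reuse is safe (no type
   promotion, the callee cannot make "a" addressable, ...), "a" itself becomes
   the return variable and USE is NULL; otherwise a hidden temporary, say
   "retval.3", is created from the RESULT_DECL and the caller later sees
   a = retval.3 instead of the call (the name retval.3 is made up).  */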
3686
3687 /* Determine if the function can be copied. If so return NULL. If
3688 not return a string describing the reason for failure. */
3689
3690 const char *
3691 copy_forbidden (struct function *fun)
3692 {
3693 const char *reason = fun->cannot_be_copied_reason;
3694
3695 /* Only examine the function once. */
3696 if (fun->cannot_be_copied_set)
3697 return reason;
3698
3699 /* We cannot copy a function that receives a non-local goto
3700 because we cannot remap the destination label used in the
3701 function that is performing the non-local goto. */
3702 /* ??? Actually, this should be possible, if we work at it.
3703 No doubt there's just a handful of places that simply
3704 assume it doesn't happen and don't substitute properly. */
3705 if (fun->has_nonlocal_label)
3706 {
3707 reason = G_("function %q+F can never be copied "
3708 "because it receives a non-local goto");
3709 goto fail;
3710 }
3711
3712 if (fun->has_forced_label_in_static)
3713 {
3714 reason = G_("function %q+F can never be copied because it saves "
3715 "address of local label in a static variable");
3716 goto fail;
3717 }
3718
3719 fail:
3720 fun->cannot_be_copied_reason = reason;
3721 fun->cannot_be_copied_set = true;
3722 return reason;
3723 }
3724
3725
3726 static const char *inline_forbidden_reason;
3727
3728 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3729 iff a function cannot be inlined. Also sets the reason why. */
3730
3731 static tree
3732 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3733 struct walk_stmt_info *wip)
3734 {
3735 tree fn = (tree) wip->info;
3736 tree t;
3737 gimple *stmt = gsi_stmt (*gsi);
3738
3739 switch (gimple_code (stmt))
3740 {
3741 case GIMPLE_CALL:
3742 /* Refuse to inline an alloca call unless the user explicitly forced it,
3743 as this may change the program's memory overhead drastically when the
3744 function using alloca is called in a loop. For the GCC benchmark in
3745 SPEC2000, inlining into schedule_block caused it to require 2GB of
3746 RAM instead of 256MB. Don't do so for alloca calls emitted for VLA
3747 objects, as those can't cause unbounded growth (they're always
3748 wrapped inside stack_save/stack_restore regions). */
3749 if (gimple_maybe_alloca_call_p (stmt)
3750 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3751 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3752 {
3753 inline_forbidden_reason
3754 = G_("function %q+F can never be inlined because it uses "
3755 "alloca (override using the always_inline attribute)");
3756 *handled_ops_p = true;
3757 return fn;
3758 }
3759
3760 t = gimple_call_fndecl (stmt);
3761 if (t == NULL_TREE)
3762 break;
3763
3764 /* We cannot inline functions that call setjmp. */
3765 if (setjmp_call_p (t))
3766 {
3767 inline_forbidden_reason
3768 = G_("function %q+F can never be inlined because it uses setjmp");
3769 *handled_ops_p = true;
3770 return t;
3771 }
3772
3773 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3774 switch (DECL_FUNCTION_CODE (t))
3775 {
3776 /* We cannot inline functions that take a variable number of
3777 arguments. */
3778 case BUILT_IN_VA_START:
3779 case BUILT_IN_NEXT_ARG:
3780 case BUILT_IN_VA_END:
3781 inline_forbidden_reason
3782 = G_("function %q+F can never be inlined because it "
3783 "uses variable argument lists");
3784 *handled_ops_p = true;
3785 return t;
3786
3787 case BUILT_IN_LONGJMP:
3788 /* We can't inline functions that call __builtin_longjmp at
3789 all. The non-local goto machinery really requires the
3790 destination be in a different function. If we allow the
3791 function calling __builtin_longjmp to be inlined into the
3792 function calling __builtin_setjmp, Things will Go Awry. */
3793 inline_forbidden_reason
3794 = G_("function %q+F can never be inlined because "
3795 "it uses setjmp-longjmp exception handling");
3796 *handled_ops_p = true;
3797 return t;
3798
3799 case BUILT_IN_NONLOCAL_GOTO:
3800 /* Similarly. */
3801 inline_forbidden_reason
3802 = G_("function %q+F can never be inlined because "
3803 "it uses non-local goto");
3804 *handled_ops_p = true;
3805 return t;
3806
3807 case BUILT_IN_RETURN:
3808 case BUILT_IN_APPLY_ARGS:
3809 /* If a __builtin_apply_args caller would be inlined,
3810 it would be saving arguments of the function it has
3811 been inlined into. Similarly __builtin_return would
3812 return from the function it has been inlined into. */
3813 inline_forbidden_reason
3814 = G_("function %q+F can never be inlined because "
3815 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3816 *handled_ops_p = true;
3817 return t;
3818
3819 default:
3820 break;
3821 }
3822 break;
3823
3824 case GIMPLE_GOTO:
3825 t = gimple_goto_dest (stmt);
3826
3827 /* We will not inline a function which uses computed goto. The
3828 addresses of its local labels, which may be tucked into
3829 global storage, are of course not constant across
3830 instantiations, which causes unexpected behavior. */
3831 if (TREE_CODE (t) != LABEL_DECL)
3832 {
3833 inline_forbidden_reason
3834 = G_("function %q+F can never be inlined "
3835 "because it contains a computed goto");
3836 *handled_ops_p = true;
3837 return t;
3838 }
3839 break;
3840
3841 default:
3842 break;
3843 }
3844
3845 *handled_ops_p = false;
3846 return NULL_TREE;
3847 }
3848
3849 /* Return true if FNDECL is a function that cannot be inlined into
3850 another one. */
3851
3852 static bool
3853 inline_forbidden_p (tree fndecl)
3854 {
3855 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3856 struct walk_stmt_info wi;
3857 basic_block bb;
3858 bool forbidden_p = false;
3859
3860 /* First check for shared reasons not to copy the code. */
3861 inline_forbidden_reason = copy_forbidden (fun);
3862 if (inline_forbidden_reason != NULL)
3863 return true;
3864
3865 /* Next, walk the statements of the function looking for
3866 constructs we can't handle or that are non-optimal for inlining. */
3867 hash_set<tree> visited_nodes;
3868 memset (&wi, 0, sizeof (wi));
3869 wi.info = (void *) fndecl;
3870 wi.pset = &visited_nodes;
3871
3872 FOR_EACH_BB_FN (bb, fun)
3873 {
3874 gimple *ret;
3875 gimple_seq seq = bb_seq (bb);
3876 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3877 forbidden_p = (ret != NULL);
3878 if (forbidden_p)
3879 break;
3880 }
3881
3882 return forbidden_p;
3883 }
3884 \f
3885 /* Return false if the function FNDECL cannot be inlined on account of its
3886 attributes, true otherwise. */
3887 static bool
3888 function_attribute_inlinable_p (const_tree fndecl)
3889 {
3890 if (targetm.attribute_table)
3891 {
3892 const_tree a;
3893
3894 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3895 {
3896 const_tree name = TREE_PURPOSE (a);
3897 int i;
3898
3899 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3900 if (is_attribute_p (targetm.attribute_table[i].name, name))
3901 return targetm.function_attribute_inlinable_p (fndecl);
3902 }
3903 }
3904
3905 return true;
3906 }
3907
3908 /* Returns nonzero if FN is a function that does not have any
3909 fundamental inline blocking properties. */
3910
3911 bool
3912 tree_inlinable_function_p (tree fn)
3913 {
3914 bool inlinable = true;
3915 bool do_warning;
3916 tree always_inline;
3917
3918 /* If we've already decided this function shouldn't be inlined,
3919 there's no need to check again. */
3920 if (DECL_UNINLINABLE (fn))
3921 return false;
3922
3923 /* We only warn for functions declared `inline' by the user. */
3924 do_warning = (warn_inline
3925 && DECL_DECLARED_INLINE_P (fn)
3926 && !DECL_NO_INLINE_WARNING_P (fn)
3927 && !DECL_IN_SYSTEM_HEADER (fn));
3928
3929 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3930
3931 if (flag_no_inline
3932 && always_inline == NULL)
3933 {
3934 if (do_warning)
3935 warning (OPT_Winline, "function %q+F can never be inlined because it "
3936 "is suppressed using %<-fno-inline%>", fn);
3937 inlinable = false;
3938 }
3939
3940 else if (!function_attribute_inlinable_p (fn))
3941 {
3942 if (do_warning)
3943 warning (OPT_Winline, "function %q+F can never be inlined because it "
3944 "uses attributes conflicting with inlining", fn);
3945 inlinable = false;
3946 }
3947
3948 else if (inline_forbidden_p (fn))
3949 {
3950 /* See if we should warn about uninlinable functions. Previously,
3951 some of these warnings would be issued while trying to expand
3952 the function inline, but that would cause multiple warnings
3953 about functions that would for example call alloca. But since
3954 this is a property of the function, just one warning is enough.
3955 As a bonus we can now give more details about the reason why a
3956 function is not inlinable. */
3957 if (always_inline)
3958 error (inline_forbidden_reason, fn);
3959 else if (do_warning)
3960 warning (OPT_Winline, inline_forbidden_reason, fn);
3961
3962 inlinable = false;
3963 }
3964
3965 /* Squirrel away the result so that we don't have to check again. */
3966 DECL_UNINLINABLE (fn) = !inlinable;
3967
3968 return inlinable;
3969 }
3970
3971 /* Estimate the cost of a memory move of type TYPE. Use the machine-dependent
3972 word size, take a possible memcpy call into account, and return the
3973 cost based on whether we optimize for size or speed according to SPEED_P. */
3974
3975 int
3976 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3977 {
3978 HOST_WIDE_INT size;
3979
3980 gcc_assert (!VOID_TYPE_P (type));
3981
3982 if (TREE_CODE (type) == VECTOR_TYPE)
3983 {
3984 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
3985 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
3986 int orig_mode_size
3987 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
3988 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
3989 return ((orig_mode_size + simd_mode_size - 1)
3990 / simd_mode_size);
3991 }
3992
3993 size = int_size_in_bytes (type);
3994
3995 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3996 /* Cost of a memcpy call, 3 arguments and the call. */
3997 return 4;
3998 else
3999 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4000 }
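/* Worked example with illustrative values (not taken from any particular
   target): assuming MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed_p) == 4,
   a 16-byte aggregate costs (16 + 8 - 1) / 8 == 2, while a 64-byte
   aggregate exceeds 8 * 4 == 32 bytes and is charged the flat memcpy
   cost of 4.  */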
4001
4002 /* Returns the cost of operation CODE, according to WEIGHTS. */
4003
4004 static int
4005 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4006 tree op1 ATTRIBUTE_UNUSED, tree op2)
4007 {
4008 switch (code)
4009 {
4010 /* These are "free" conversions, or their presumed cost
4011 is folded into other operations. */
4012 case RANGE_EXPR:
4013 CASE_CONVERT:
4014 case COMPLEX_EXPR:
4015 case PAREN_EXPR:
4016 case VIEW_CONVERT_EXPR:
4017 return 0;
4018
4019 /* Assign cost of 1 to usual operations.
4020 ??? We may consider mapping RTL costs to this. */
4021 case COND_EXPR:
4022 case VEC_COND_EXPR:
4023 case VEC_PERM_EXPR:
4024
4025 case PLUS_EXPR:
4026 case POINTER_PLUS_EXPR:
4027 case POINTER_DIFF_EXPR:
4028 case MINUS_EXPR:
4029 case MULT_EXPR:
4030 case MULT_HIGHPART_EXPR:
4031
4032 case ADDR_SPACE_CONVERT_EXPR:
4033 case FIXED_CONVERT_EXPR:
4034 case FIX_TRUNC_EXPR:
4035
4036 case NEGATE_EXPR:
4037 case FLOAT_EXPR:
4038 case MIN_EXPR:
4039 case MAX_EXPR:
4040 case ABS_EXPR:
4041 case ABSU_EXPR:
4042
4043 case LSHIFT_EXPR:
4044 case RSHIFT_EXPR:
4045 case LROTATE_EXPR:
4046 case RROTATE_EXPR:
4047
4048 case BIT_IOR_EXPR:
4049 case BIT_XOR_EXPR:
4050 case BIT_AND_EXPR:
4051 case BIT_NOT_EXPR:
4052
4053 case TRUTH_ANDIF_EXPR:
4054 case TRUTH_ORIF_EXPR:
4055 case TRUTH_AND_EXPR:
4056 case TRUTH_OR_EXPR:
4057 case TRUTH_XOR_EXPR:
4058 case TRUTH_NOT_EXPR:
4059
4060 case LT_EXPR:
4061 case LE_EXPR:
4062 case GT_EXPR:
4063 case GE_EXPR:
4064 case EQ_EXPR:
4065 case NE_EXPR:
4066 case ORDERED_EXPR:
4067 case UNORDERED_EXPR:
4068
4069 case UNLT_EXPR:
4070 case UNLE_EXPR:
4071 case UNGT_EXPR:
4072 case UNGE_EXPR:
4073 case UNEQ_EXPR:
4074 case LTGT_EXPR:
4075
4076 case CONJ_EXPR:
4077
4078 case PREDECREMENT_EXPR:
4079 case PREINCREMENT_EXPR:
4080 case POSTDECREMENT_EXPR:
4081 case POSTINCREMENT_EXPR:
4082
4083 case REALIGN_LOAD_EXPR:
4084
4085 case WIDEN_SUM_EXPR:
4086 case WIDEN_MULT_EXPR:
4087 case DOT_PROD_EXPR:
4088 case SAD_EXPR:
4089 case WIDEN_MULT_PLUS_EXPR:
4090 case WIDEN_MULT_MINUS_EXPR:
4091 case WIDEN_LSHIFT_EXPR:
4092
4093 case VEC_WIDEN_MULT_HI_EXPR:
4094 case VEC_WIDEN_MULT_LO_EXPR:
4095 case VEC_WIDEN_MULT_EVEN_EXPR:
4096 case VEC_WIDEN_MULT_ODD_EXPR:
4097 case VEC_UNPACK_HI_EXPR:
4098 case VEC_UNPACK_LO_EXPR:
4099 case VEC_UNPACK_FLOAT_HI_EXPR:
4100 case VEC_UNPACK_FLOAT_LO_EXPR:
4101 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4102 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4103 case VEC_PACK_TRUNC_EXPR:
4104 case VEC_PACK_SAT_EXPR:
4105 case VEC_PACK_FIX_TRUNC_EXPR:
4106 case VEC_PACK_FLOAT_EXPR:
4107 case VEC_WIDEN_LSHIFT_HI_EXPR:
4108 case VEC_WIDEN_LSHIFT_LO_EXPR:
4109 case VEC_DUPLICATE_EXPR:
4110 case VEC_SERIES_EXPR:
4111
4112 return 1;
4113
4114 /* A few special cases of expensive operations. This is useful
4115 to avoid inlining functions having too many of these. */
4116 case TRUNC_DIV_EXPR:
4117 case CEIL_DIV_EXPR:
4118 case FLOOR_DIV_EXPR:
4119 case ROUND_DIV_EXPR:
4120 case EXACT_DIV_EXPR:
4121 case TRUNC_MOD_EXPR:
4122 case CEIL_MOD_EXPR:
4123 case FLOOR_MOD_EXPR:
4124 case ROUND_MOD_EXPR:
4125 case RDIV_EXPR:
4126 if (TREE_CODE (op2) != INTEGER_CST)
4127 return weights->div_mod_cost;
4128 return 1;
4129
4130 /* Bit-field insertion needs several shift and mask operations. */
4131 case BIT_INSERT_EXPR:
4132 return 3;
4133
4134 default:
4135 /* We expect a copy assignment with no operator. */
4136 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4137 return 0;
4138 }
4139 }
4140
4141
4142 /* Estimate number of instructions that will be created by expanding
4143 the statements in the statement sequence STMTS.
4144 WEIGHTS contains weights attributed to various constructs. */
4145
4146 int
4147 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4148 {
4149 int cost;
4150 gimple_stmt_iterator gsi;
4151
4152 cost = 0;
4153 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4154 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4155
4156 return cost;
4157 }
4158
4159
4160 /* Estimate number of instructions that will be created by expanding STMT.
4161 WEIGHTS contains weights attributed to various constructs. */
4162
4163 int
4164 estimate_num_insns (gimple *stmt, eni_weights *weights)
4165 {
4166 unsigned cost, i;
4167 enum gimple_code code = gimple_code (stmt);
4168 tree lhs;
4169 tree rhs;
4170
4171 switch (code)
4172 {
4173 case GIMPLE_ASSIGN:
4174 /* Try to estimate the cost of assignments. We have two cases to
4175 deal with:
4176 1) Simple assignments to registers;
4177 2) Stores to things that must live in memory. This includes
4178 "normal" stores to scalars, but also assignments of large
4179 structures, or constructors of big arrays;
4180
4181 Let us look at these two cases, assuming we have "a = b + C":
4182 <GIMPLE_ASSIGN <var_decl "a">
4183 <plus_expr <var_decl "b"> <constant C>>
4184 If "a" is a GIMPLE register, the assignment to it is free on almost
4185 any target, because "a" usually ends up in a real register. Hence
4186 the only cost of this expression comes from the PLUS_EXPR, and we
4187 can ignore the GIMPLE_ASSIGN.
4188 If "a" is not a GIMPLE register, the assignment to "a" will most
4189 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4190 of moving something into "a", which we compute using the function
4191 estimate_move_cost. */
4192 if (gimple_clobber_p (stmt))
4193 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4194
4195 lhs = gimple_assign_lhs (stmt);
4196 rhs = gimple_assign_rhs1 (stmt);
4197
4198 cost = 0;
4199
4200 /* Account for the cost of moving to / from memory. */
4201 if (gimple_store_p (stmt))
4202 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4203 if (gimple_assign_load_p (stmt))
4204 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4205
4206 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4207 gimple_assign_rhs1 (stmt),
4208 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4209 == GIMPLE_BINARY_RHS
4210 ? gimple_assign_rhs2 (stmt) : NULL);
4211 break;
4212
4213 case GIMPLE_COND:
4214 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4215 gimple_op (stmt, 0),
4216 gimple_op (stmt, 1));
4217 break;
4218
4219 case GIMPLE_SWITCH:
4220 {
4221 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4222 /* Take into account cost of the switch + guess 2 conditional jumps for
4223 each case label.
4224
4225 TODO: once the switch expansion logic is sufficiently separated, we can
4226 do a better job of estimating the cost of the switch.
4227 if (weights->time_based)
4228 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4229 else
4230 cost = gimple_switch_num_labels (switch_stmt) * 2;
4231 }
4232 break;
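/* Example (illustrative): a switch with 10 case labels is estimated at
   floor_log2 (10) * 2 == 6 when weighing time, but at 10 * 2 == 20 when
   weighing size.  */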
4233
4234 case GIMPLE_CALL:
4235 {
4236 tree decl;
4237
4238 if (gimple_call_internal_p (stmt))
4239 return 0;
4240 else if ((decl = gimple_call_fndecl (stmt))
4241 && fndecl_built_in_p (decl))
4242 {
4243 /* Do not special-case builtins where we see the body.
4244 This just confuses the inliner. */
4245 struct cgraph_node *node;
4246 if (!(node = cgraph_node::get (decl))
4247 || node->definition)
4248 ;
4249 /* For builtins that are likely expanded to nothing or
4250 inlined, do not account operand costs. */
4251 else if (is_simple_builtin (decl))
4252 return 0;
4253 else if (is_inexpensive_builtin (decl))
4254 return weights->target_builtin_call_cost;
4255 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4256 {
4257 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4258 specialize the cheap expansion we do here.
4259 ??? This asks for a more general solution. */
4260 switch (DECL_FUNCTION_CODE (decl))
4261 {
4262 case BUILT_IN_POW:
4263 case BUILT_IN_POWF:
4264 case BUILT_IN_POWL:
4265 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4266 && (real_equal
4267 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4268 &dconst2)))
4269 return estimate_operator_cost
4270 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4271 gimple_call_arg (stmt, 0));
4272 break;
4273
4274 default:
4275 break;
4276 }
4277 }
4278 }
4279
4280 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4281 if (gimple_call_lhs (stmt))
4282 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4283 weights->time_based);
4284 for (i = 0; i < gimple_call_num_args (stmt); i++)
4285 {
4286 tree arg = gimple_call_arg (stmt, i);
4287 cost += estimate_move_cost (TREE_TYPE (arg),
4288 weights->time_based);
4289 }
4290 break;
4291 }
4292
4293 case GIMPLE_RETURN:
4294 return weights->return_cost;
4295
4296 case GIMPLE_GOTO:
4297 case GIMPLE_LABEL:
4298 case GIMPLE_NOP:
4299 case GIMPLE_PHI:
4300 case GIMPLE_PREDICT:
4301 case GIMPLE_DEBUG:
4302 return 0;
4303
4304 case GIMPLE_ASM:
4305 {
4306 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4307 /* 1000 means infinity. This avoids overflows later
4308 with very long asm statements. */
4309 if (count > 1000)
4310 count = 1000;
4311 /* If this asm is asm inline, count anything as minimum size. */
4312 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4313 count = MIN (1, count);
4314 return MAX (1, count);
4315 }
4316
4317 case GIMPLE_RESX:
4318 /* This is either going to be an external function call with one
4319 argument, or two register copy statements plus a goto. */
4320 return 2;
4321
4322 case GIMPLE_EH_DISPATCH:
4323 /* ??? This is going to turn into a switch statement. Ideally
4324 we'd have a look at the eh region and estimate the number of
4325 edges involved. */
4326 return 10;
4327
4328 case GIMPLE_BIND:
4329 return estimate_num_insns_seq (
4330 gimple_bind_body (as_a <gbind *> (stmt)),
4331 weights);
4332
4333 case GIMPLE_EH_FILTER:
4334 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4335
4336 case GIMPLE_CATCH:
4337 return estimate_num_insns_seq (gimple_catch_handler (
4338 as_a <gcatch *> (stmt)),
4339 weights);
4340
4341 case GIMPLE_TRY:
4342 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4343 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4344
4345 /* OMP directives are generally very expensive. */
4346
4347 case GIMPLE_OMP_RETURN:
4348 case GIMPLE_OMP_SECTIONS_SWITCH:
4349 case GIMPLE_OMP_ATOMIC_STORE:
4350 case GIMPLE_OMP_CONTINUE:
4351 /* ...except these, which are cheap. */
4352 return 0;
4353
4354 case GIMPLE_OMP_ATOMIC_LOAD:
4355 return weights->omp_cost;
4356
4357 case GIMPLE_OMP_FOR:
4358 return (weights->omp_cost
4359 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4360 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4361
4362 case GIMPLE_OMP_PARALLEL:
4363 case GIMPLE_OMP_TASK:
4364 case GIMPLE_OMP_CRITICAL:
4365 case GIMPLE_OMP_MASTER:
4366 case GIMPLE_OMP_TASKGROUP:
4367 case GIMPLE_OMP_ORDERED:
4368 case GIMPLE_OMP_SECTION:
4369 case GIMPLE_OMP_SECTIONS:
4370 case GIMPLE_OMP_SINGLE:
4371 case GIMPLE_OMP_TARGET:
4372 case GIMPLE_OMP_TEAMS:
4373 return (weights->omp_cost
4374 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4375
4376 case GIMPLE_TRANSACTION:
4377 return (weights->tm_cost
4378 + estimate_num_insns_seq (gimple_transaction_body (
4379 as_a <gtransaction *> (stmt)),
4380 weights));
4381
4382 default:
4383 gcc_unreachable ();
4384 }
4385
4386 return cost;
4387 }
4388
4389 /* Estimate number of instructions that will be created by expanding
4390 function FNDECL. WEIGHTS contains weights attributed to various
4391 constructs. */
4392
4393 int
4394 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4395 {
4396 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4397 gimple_stmt_iterator bsi;
4398 basic_block bb;
4399 int n = 0;
4400
4401 gcc_assert (my_function && my_function->cfg);
4402 FOR_EACH_BB_FN (bb, my_function)
4403 {
4404 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4405 n += estimate_num_insns (gsi_stmt (bsi), weights);
4406 }
4407
4408 return n;
4409 }
4410
4411
4412 /* Initializes weights used by estimate_num_insns. */
4413
4414 void
4415 init_inline_once (void)
4416 {
4417 eni_size_weights.call_cost = 1;
4418 eni_size_weights.indirect_call_cost = 3;
4419 eni_size_weights.target_builtin_call_cost = 1;
4420 eni_size_weights.div_mod_cost = 1;
4421 eni_size_weights.omp_cost = 40;
4422 eni_size_weights.tm_cost = 10;
4423 eni_size_weights.time_based = false;
4424 eni_size_weights.return_cost = 1;
4425
4426 /* Estimating the time for a call is difficult, since we have no idea what the
4427 called function does. In the current uses of eni_time_weights,
4428 underestimating the cost does less harm than overestimating it, so
4429 we choose a rather small value here. */
4430 eni_time_weights.call_cost = 10;
4431 eni_time_weights.indirect_call_cost = 15;
4432 eni_time_weights.target_builtin_call_cost = 1;
4433 eni_time_weights.div_mod_cost = 10;
4434 eni_time_weights.omp_cost = 40;
4435 eni_time_weights.tm_cost = 40;
4436 eni_time_weights.time_based = true;
4437 eni_time_weights.return_cost = 2;
4438 }
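/* For example, with the weights above a division or modulus by a
   non-constant operand contributes 1 to a size estimate but 10 to a time
   estimate, and an indirect call is charged 3 vs. 15; as noted above, the
   time weights are deliberately kept on the low side.  */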
4439
4440
4441 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4442
4443 static void
4444 prepend_lexical_block (tree current_block, tree new_block)
4445 {
4446 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4447 BLOCK_SUBBLOCKS (current_block) = new_block;
4448 BLOCK_SUPERCONTEXT (new_block) = current_block;
4449 }
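/* Sketch of the effect: if CURRENT_BLOCK already has subblocks B1 -> B2,
   then after prepend_lexical_block (CURRENT_BLOCK, N) the subblock chain
   reads N -> B1 -> B2 and BLOCK_SUPERCONTEXT (N) is CURRENT_BLOCK.  */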
4450
4451 /* Add local variables from CALLEE to CALLER. */
4452
4453 static inline void
4454 add_local_variables (struct function *callee, struct function *caller,
4455 copy_body_data *id)
4456 {
4457 tree var;
4458 unsigned ix;
4459
4460 FOR_EACH_LOCAL_DECL (callee, ix, var)
4461 if (!can_be_nonlocal (var, id))
4462 {
4463 tree new_var = remap_decl (var, id);
4464
4465 /* Remap debug-expressions. */
4466 if (VAR_P (new_var)
4467 && DECL_HAS_DEBUG_EXPR_P (var)
4468 && new_var != var)
4469 {
4470 tree tem = DECL_DEBUG_EXPR (var);
4471 bool old_regimplify = id->regimplify;
4472 id->remapping_type_depth++;
4473 walk_tree (&tem, copy_tree_body_r, id, NULL);
4474 id->remapping_type_depth--;
4475 id->regimplify = old_regimplify;
4476 SET_DECL_DEBUG_EXPR (new_var, tem);
4477 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4478 }
4479 add_local_decl (caller, new_var);
4480 }
4481 }
4482
4483 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4484 have brought in or introduced any debug stmts for SRCVAR. */
4485
4486 static inline void
4487 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4488 {
4489 tree *remappedvarp = id->decl_map->get (srcvar);
4490
4491 if (!remappedvarp)
4492 return;
4493
4494 if (!VAR_P (*remappedvarp))
4495 return;
4496
4497 if (*remappedvarp == id->retvar)
4498 return;
4499
4500 tree tvar = target_for_debug_bind (*remappedvarp);
4501 if (!tvar)
4502 return;
4503
4504 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4505 id->call_stmt);
4506 gimple_seq_add_stmt (bindings, stmt);
4507 }
4508
4509 /* For each inlined variable for which we may have debug bind stmts,
4510 add before GSI a final debug stmt resetting it, marking the end of
4511 its life, so that var-tracking knows it doesn't have to compute
4512 further locations for it. */
4513
4514 static inline void
4515 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4516 {
4517 tree var;
4518 unsigned ix;
4519 gimple_seq bindings = NULL;
4520
4521 if (!gimple_in_ssa_p (id->src_cfun))
4522 return;
4523
4524 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4525 return;
4526
4527 for (var = DECL_ARGUMENTS (id->src_fn);
4528 var; var = DECL_CHAIN (var))
4529 reset_debug_binding (id, var, &bindings);
4530
4531 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4532 reset_debug_binding (id, var, &bindings);
4533
4534 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4535 }
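/* Illustrative effect: for an inlined variable x that may have debug bind
   stmts, a final binding of the form  # DEBUG x => NULL  is emitted before
   GSI, telling var-tracking that no further locations need to be computed
   for x past this point (the dump syntax shown is approximate).  */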
4536
4537 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4538
4539 static bool
4540 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4541 {
4542 tree use_retvar;
4543 tree fn;
4544 hash_map<tree, tree> *dst;
4545 hash_map<tree, tree> *st = NULL;
4546 tree return_slot;
4547 tree modify_dest;
4548 struct cgraph_edge *cg_edge;
4549 cgraph_inline_failed_t reason;
4550 basic_block return_block;
4551 edge e;
4552 gimple_stmt_iterator gsi, stmt_gsi;
4553 bool successfully_inlined = false;
4554 bool purge_dead_abnormal_edges;
4555 gcall *call_stmt;
4556 unsigned int prop_mask, src_properties;
4557 struct function *dst_cfun;
4558 tree simduid;
4559 use_operand_p use;
4560 gimple *simtenter_stmt = NULL;
4561 vec<tree> *simtvars_save;
4562
4563 /* The gimplifier uses input_location in too many places, such as
4564 internal_get_tmp_var (). */
4565 location_t saved_location = input_location;
4566 input_location = gimple_location (stmt);
4567
4568 /* From here on, we're only interested in CALL_EXPRs. */
4569 call_stmt = dyn_cast <gcall *> (stmt);
4570 if (!call_stmt)
4571 goto egress;
4572
4573 cg_edge = id->dst_node->get_edge (stmt);
4574 gcc_checking_assert (cg_edge);
4575 /* First, see if we can figure out what function is being called.
4576 If we cannot, then there is no hope of inlining the function. */
4577 if (cg_edge->indirect_unknown_callee)
4578 goto egress;
4579 fn = cg_edge->callee->decl;
4580 gcc_checking_assert (fn);
4581
4582 /* If FN is a declaration of a function in a nested scope that was
4583 globally declared inline, we don't set its DECL_INITIAL.
4584 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4585 C++ front-end uses it for cdtors to refer to their internal
4586 declarations, which are not real functions. Fortunately those
4587 don't have trees to be saved, so we can tell by checking their
4588 gimple_body. */
4589 if (!DECL_INITIAL (fn)
4590 && DECL_ABSTRACT_ORIGIN (fn)
4591 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4592 fn = DECL_ABSTRACT_ORIGIN (fn);
4593
4594 /* Don't try to inline functions that are not well-suited to inlining. */
4595 if (cg_edge->inline_failed)
4596 {
4597 reason = cg_edge->inline_failed;
4598 /* If this call was originally indirect, we do not want to emit any
4599 inlining related warnings or sorry messages because there are no
4600 guarantees regarding those. */
4601 if (cg_edge->indirect_inlining_edge)
4602 goto egress;
4603
4604 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4605 /* For extern inline functions that get redefined we have always
4606 silently ignored the always_inline flag. Better behavior would
4607 be to keep both bodies and use the extern inline body
4608 for inlining, but we can't do that because frontends overwrite
4609 the body. */
4610 && !cg_edge->callee->local.redefined_extern_inline
4611 /* During early inline pass, report only when optimization is
4612 not turned on. */
4613 && (symtab->global_info_ready
4614 || !optimize
4615 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4616 /* PR 20090218-1_0.c. Body can be provided by another module. */
4617 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4618 {
4619 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4620 cgraph_inline_failed_string (reason));
4621 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4622 inform (gimple_location (stmt), "called from here");
4623 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4624 inform (DECL_SOURCE_LOCATION (cfun->decl),
4625 "called from this function");
4626 }
4627 else if (warn_inline
4628 && DECL_DECLARED_INLINE_P (fn)
4629 && !DECL_NO_INLINE_WARNING_P (fn)
4630 && !DECL_IN_SYSTEM_HEADER (fn)
4631 && reason != CIF_UNSPECIFIED
4632 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4633 /* Do not warn about not inlined recursive calls. */
4634 && !cg_edge->recursive_p ()
4635 /* Avoid warnings during early inline pass. */
4636 && symtab->global_info_ready)
4637 {
4638 auto_diagnostic_group d;
4639 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4640 fn, _(cgraph_inline_failed_string (reason))))
4641 {
4642 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4643 inform (gimple_location (stmt), "called from here");
4644 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4645 inform (DECL_SOURCE_LOCATION (cfun->decl),
4646 "called from this function");
4647 }
4648 }
4649 goto egress;
4650 }
4651 id->src_node = cg_edge->callee;
4652
4653 /* If the callee is a thunk, all we need is to adjust the THIS pointer
4654 and redirect to the function being thunked. */
4655 if (id->src_node->thunk.thunk_p)
4656 {
4657 cgraph_edge *edge;
4658 tree virtual_offset = NULL;
4659 profile_count count = cg_edge->count;
4660 tree op;
4661 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4662
4663 cg_edge->remove ();
4664 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4665 gimple_uid (stmt),
4666 profile_count::one (),
4667 profile_count::one (),
4668 true);
4669 edge->count = count;
4670 if (id->src_node->thunk.virtual_offset_p)
4671 virtual_offset = size_int (id->src_node->thunk.virtual_value);
4672 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4673 NULL);
4674 gsi_insert_before (&iter, gimple_build_assign (op,
4675 gimple_call_arg (stmt, 0)),
4676 GSI_NEW_STMT);
4677 gcc_assert (id->src_node->thunk.this_adjusting);
4678 op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
4679 virtual_offset, id->src_node->thunk.indirect_offset);
4680
4681 gimple_call_set_arg (stmt, 0, op);
4682 gimple_call_set_fndecl (stmt, edge->callee->decl);
4683 update_stmt (stmt);
4684 id->src_node->remove ();
4685 expand_call_inline (bb, stmt, id);
4686 maybe_remove_unused_call_args (cfun, stmt);
4687 return true;
4688 }
4689 fn = cg_edge->callee->decl;
4690 cg_edge->callee->get_untransformed_body ();
4691
4692 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4693 cg_edge->callee->verify ();
4694
4695 /* We will be inlining this callee. */
4696 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4697
4698 /* Update the callers EH personality. */
4699 if (DECL_FUNCTION_PERSONALITY (fn))
4700 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4701 = DECL_FUNCTION_PERSONALITY (fn);
4702
4703 /* Split the block before the GIMPLE_CALL. */
4704 stmt_gsi = gsi_for_stmt (stmt);
4705 gsi_prev (&stmt_gsi);
4706 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4707 bb = e->src;
4708 return_block = e->dest;
4709 remove_edge (e);
4710
4711 /* If the GIMPLE_CALL was the last statement of BB, it may have
4712 been the source of abnormal edges. In this case, schedule
4713 the removal of dead abnormal edges. */
4714 gsi = gsi_start_bb (return_block);
4715 gsi_next (&gsi);
4716 purge_dead_abnormal_edges = gsi_end_p (gsi);
4717
4718 stmt_gsi = gsi_start_bb (return_block);
4719
4720 /* Build a block containing code to initialize the arguments, the
4721 actual inline expansion of the body, and a label for the return
4722 statements within the function to jump to. The type of the
4723 statement expression is the return type of the function call.
4724 ??? If the call does not have an associated block then we will
4725 remap all callee blocks to NULL, effectively dropping most of
4726 its debug information. This should only happen for calls to
4727 artificial decls inserted by the compiler itself. We need to
4728 either link the inlined blocks into the caller block tree or
4729 not refer to them in any way to not break GC for locations. */
4730 if (tree block = gimple_block (stmt))
4731 {
4732 /* We want to assign a BLOCK_SOURCE_LOCATION that is not UNKNOWN_LOCATION
4733 so that inlined_function_outer_scope_p returns true on this BLOCK. */
4734 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4735 if (loc == UNKNOWN_LOCATION)
4736 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4737 if (loc == UNKNOWN_LOCATION)
4738 loc = BUILTINS_LOCATION;
4739 id->block = make_node (BLOCK);
4740 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4741 BLOCK_SOURCE_LOCATION (id->block) = loc;
4742 prepend_lexical_block (block, id->block);
4743 }
4744
4745 /* Local declarations will be replaced by their equivalents in this map. */
4746 st = id->decl_map;
4747 id->decl_map = new hash_map<tree, tree>;
4748 dst = id->debug_map;
4749 id->debug_map = NULL;
4750 if (flag_stack_reuse != SR_NONE)
4751 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4752
4753 /* Record the function we are about to inline. */
4754 id->src_fn = fn;
4755 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4756 id->reset_location = DECL_IGNORED_P (fn);
4757 id->call_stmt = call_stmt;
4758
4759 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4760 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4761 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4762 simtvars_save = id->dst_simt_vars;
4763 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4764 && (simduid = bb->loop_father->simduid) != NULL_TREE
4765 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4766 && single_imm_use (simduid, &use, &simtenter_stmt)
4767 && is_gimple_call (simtenter_stmt)
4768 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4769 vec_alloc (id->dst_simt_vars, 0);
4770 else
4771 id->dst_simt_vars = NULL;
4772
4773 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4774 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4775
4776 /* If the src function contains an IFN_VA_ARG, then so will the dst
4777 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4778 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4779 src_properties = id->src_cfun->curr_properties & prop_mask;
4780 if (src_properties != prop_mask)
4781 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4782 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4783
4784 gcc_assert (!id->src_cfun->after_inlining);
4785
4786 id->entry_bb = bb;
4787 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4788 {
4789 gimple_stmt_iterator si = gsi_last_bb (bb);
4790 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4791 NOT_TAKEN),
4792 GSI_NEW_STMT);
4793 }
4794 initialize_inlined_parameters (id, stmt, fn, bb);
4795 if (debug_nonbind_markers_p && debug_inline_points && id->block
4796 && inlined_function_outer_scope_p (id->block))
4797 {
4798 gimple_stmt_iterator si = gsi_last_bb (bb);
4799 gsi_insert_after (&si, gimple_build_debug_inline_entry
4800 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4801 GSI_NEW_STMT);
4802 }
4803
4804 if (DECL_INITIAL (fn))
4805 {
4806 if (gimple_block (stmt))
4807 {
4808 tree *var;
4809
4810 prepend_lexical_block (id->block,
4811 remap_blocks (DECL_INITIAL (fn), id));
4812 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4813 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4814 == NULL_TREE));
4815 /* Move vars for PARM_DECLs from the DECL_INITIAL block to id->block,
4816 otherwise in DWARF the DW_TAG_formal_parameter entries will not be
4817 children of DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4818 under it. The parameters can then be evaluated in the debugger,
4819 but don't show up in backtraces. */
4820 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4821 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4822 {
4823 tree v = *var;
4824 *var = TREE_CHAIN (v);
4825 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4826 BLOCK_VARS (id->block) = v;
4827 }
4828 else
4829 var = &TREE_CHAIN (*var);
4830 }
4831 else
4832 remap_blocks_to_null (DECL_INITIAL (fn), id);
4833 }
4834
4835 /* Return statements in the function body will be replaced by jumps
4836 to the RET_LABEL. */
4837 gcc_assert (DECL_INITIAL (fn));
4838 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4839
4840 /* Find the LHS to which the result of this call is assigned. */
4841 return_slot = NULL;
4842 if (gimple_call_lhs (stmt))
4843 {
4844 modify_dest = gimple_call_lhs (stmt);
4845
4846 /* The function which we are inlining might not return a value,
4847 in which case we should issue a warning that the function
4848 does not return a value. In that case the optimizers will
4849 see that the variable to which the value is assigned was not
4850 initialized. We do not want to issue a warning about that
4851 uninitialized variable. */
4852 if (DECL_P (modify_dest))
4853 TREE_NO_WARNING (modify_dest) = 1;
4854
4855 if (gimple_call_return_slot_opt_p (call_stmt))
4856 {
4857 return_slot = modify_dest;
4858 modify_dest = NULL;
4859 }
4860 }
4861 else
4862 modify_dest = NULL;
4863
4864 /* If we are inlining a call to the C++ operator new, we don't want
4865 to use type based alias analysis on the return value. Otherwise
4866 we may get confused if the compiler sees that the inlined new
4867 function returns a pointer which was just deleted. See bug
4868 33407. */
4869 if (DECL_IS_OPERATOR_NEW (fn))
4870 {
4871 return_slot = NULL;
4872 modify_dest = NULL;
4873 }
4874
4875 /* Declare the return variable for the function. */
4876 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
4877
4878 /* Add local vars in this inlined callee to caller. */
4879 add_local_variables (id->src_cfun, cfun, id);
4880
4881 if (dump_enabled_p ())
4882 {
4883 char buf[128];
4884 snprintf (buf, sizeof(buf), "%4.2f",
4885 cg_edge->sreal_frequency ().to_double ());
4886 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
4887 call_stmt,
4888 "Inlining %C to %C with frequency %s\n",
4889 id->src_node, id->dst_node, buf);
4890 if (dump_file && (dump_flags & TDF_DETAILS))
4891 {
4892 id->src_node->dump (dump_file);
4893 id->dst_node->dump (dump_file);
4894 }
4895 }
4896
4897 /* This is it. Duplicate the callee body. Assume callee is
4898 pre-gimplified. Note that we must not alter the caller
4899 function in any way before this point, as this CALL_EXPR may be
4900 a self-referential call; if we're calling ourselves, we need to
4901 duplicate our body before altering anything. */
4902 copy_body (id, bb, return_block, NULL);
4903
4904 reset_debug_bindings (id, stmt_gsi);
4905
4906 if (flag_stack_reuse != SR_NONE)
4907 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
4908 if (!TREE_THIS_VOLATILE (p))
4909 {
4910 tree *varp = id->decl_map->get (p);
4911 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
4912 {
4913 tree clobber = build_constructor (TREE_TYPE (*varp), NULL);
4914 gimple *clobber_stmt;
4915 TREE_THIS_VOLATILE (clobber) = 1;
4916 clobber_stmt = gimple_build_assign (*varp, clobber);
4917 gimple_set_location (clobber_stmt, gimple_location (stmt));
4918 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4919 }
4920 }
4921
4922 /* Reset the escaped solution. */
4923 if (cfun->gimple_df)
4924 pt_solution_reset (&cfun->gimple_df->escaped);
4925
4926 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
4927 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
4928 {
4929 size_t nargs = gimple_call_num_args (simtenter_stmt);
4930 vec<tree> *vars = id->dst_simt_vars;
4931 auto_vec<tree> newargs (nargs + vars->length ());
4932 for (size_t i = 0; i < nargs; i++)
4933 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
4934 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
4935 {
4936 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
4937 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
4938 }
4939 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
4940 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
4941 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
4942 gsi_replace (&gsi, g, false);
4943 }
4944 vec_free (id->dst_simt_vars);
4945 id->dst_simt_vars = simtvars_save;
4946
4947 /* Clean up. */
4948 if (id->debug_map)
4949 {
4950 delete id->debug_map;
4951 id->debug_map = dst;
4952 }
4953 delete id->decl_map;
4954 id->decl_map = st;
4955
4956 /* Unlink the call's virtual operands before replacing it. */
4957 unlink_stmt_vdef (stmt);
4958 if (gimple_vdef (stmt)
4959 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4960 release_ssa_name (gimple_vdef (stmt));
4961
4962 /* If the inlined function returns a result that we care about,
4963 substitute the GIMPLE_CALL with an assignment of the return
4964 variable to the LHS of the call. That is, if STMT was
4965 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4966 if (use_retvar && gimple_call_lhs (stmt))
4967 {
4968 gimple *old_stmt = stmt;
4969 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4970 gimple_set_location (stmt, gimple_location (old_stmt));
4971 gsi_replace (&stmt_gsi, stmt, false);
4972 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4973 /* Append a clobber for id->retvar if easily possible. */
4974 if (flag_stack_reuse != SR_NONE
4975 && id->retvar
4976 && VAR_P (id->retvar)
4977 && id->retvar != return_slot
4978 && id->retvar != modify_dest
4979 && !TREE_THIS_VOLATILE (id->retvar)
4980 && !is_gimple_reg (id->retvar)
4981 && !stmt_ends_bb_p (stmt))
4982 {
4983 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
4984 gimple *clobber_stmt;
4985 TREE_THIS_VOLATILE (clobber) = 1;
4986 clobber_stmt = gimple_build_assign (id->retvar, clobber);
4987 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
4988 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
4989 }
4990 }
4991 else
4992 {
4993 /* Handle the case of inlining a function with no return
4994 statement, which causes the return value to become undefined. */
4995 if (gimple_call_lhs (stmt)
4996 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4997 {
4998 tree name = gimple_call_lhs (stmt);
4999 tree var = SSA_NAME_VAR (name);
5000 tree def = var ? ssa_default_def (cfun, var) : NULL;
5001
5002 if (def)
5003 {
5004 /* If the variable is used undefined, make this name
5005 undefined via a move. */
5006 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5007 gsi_replace (&stmt_gsi, stmt, true);
5008 }
5009 else
5010 {
5011 if (!var)
5012 {
5013 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5014 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5015 }
5016 /* Otherwise make this variable undefined. */
5017 gsi_remove (&stmt_gsi, true);
5018 set_ssa_default_def (cfun, var, name);
5019 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5020 }
5021 }
5022 /* Replace with a clobber for id->retvar. */
5023 else if (flag_stack_reuse != SR_NONE
5024 && id->retvar
5025 && VAR_P (id->retvar)
5026 && id->retvar != return_slot
5027 && id->retvar != modify_dest
5028 && !TREE_THIS_VOLATILE (id->retvar)
5029 && !is_gimple_reg (id->retvar))
5030 {
5031 tree clobber = build_constructor (TREE_TYPE (id->retvar), NULL);
5032 gimple *clobber_stmt;
5033 TREE_THIS_VOLATILE (clobber) = 1;
5034 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5035 gimple_set_location (clobber_stmt, gimple_location (stmt));
5036 gsi_replace (&stmt_gsi, clobber_stmt, false);
5037 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5038 }
5039 else
5040 gsi_remove (&stmt_gsi, true);
5041 }
5042
5043 if (purge_dead_abnormal_edges)
5044 {
5045 gimple_purge_dead_eh_edges (return_block);
5046 gimple_purge_dead_abnormal_call_edges (return_block);
5047 }
5048
5049 /* If the value of the new expression is ignored, that's OK. We
5050 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5051 the equivalent inlined version either. */
5052 if (is_gimple_assign (stmt))
5053 {
5054 gcc_assert (gimple_assign_single_p (stmt)
5055 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5056 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5057 }
5058
5059 id->add_clobbers_to_eh_landing_pads = 0;
5060
5061 /* Output the inlining info for this abstract function, since it has been
5062 inlined. If we don't do this now, we can lose the information about the
5063 variables in the function when the blocks get blown away as soon as we
5064 remove the cgraph node. */
5065 if (gimple_block (stmt))
5066 (*debug_hooks->outlining_inline_function) (fn);
5067
5068 /* Update callgraph if needed. */
5069 cg_edge->callee->remove ();
5070
5071 id->block = NULL_TREE;
5072 id->retvar = NULL_TREE;
5073 successfully_inlined = true;
5074
5075 egress:
5076 input_location = saved_location;
5077 return successfully_inlined;
5078 }
5079
5080 /* Expand call statements reachable from STMT_P.
5081 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5082 in a MODIFY_EXPR. */
5083
5084 static bool
5085 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
5086 {
5087 gimple_stmt_iterator gsi;
5088 bool inlined = false;
5089
5090 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5091 {
5092 gimple *stmt = gsi_stmt (gsi);
5093 gsi_prev (&gsi);
5094
5095 if (is_gimple_call (stmt)
5096 && !gimple_call_internal_p (stmt))
5097 inlined |= expand_call_inline (bb, stmt, id);
5098 }
5099
5100 return inlined;
5101 }
5102
5103
5104 /* Walk all basic blocks created after FIRST and try to fold every statement
5105 in the STATEMENTS pointer set. */
5106
5107 static void
5108 fold_marked_statements (int first, hash_set<gimple *> *statements)
5109 {
5110 for (; first < last_basic_block_for_fn (cfun); first++)
5111 if (BASIC_BLOCK_FOR_FN (cfun, first))
5112 {
5113 gimple_stmt_iterator gsi;
5114
5115 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5116 !gsi_end_p (gsi);
5117 gsi_next (&gsi))
5118 if (statements->contains (gsi_stmt (gsi)))
5119 {
5120 gimple *old_stmt = gsi_stmt (gsi);
5121 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
5122
5123 if (old_decl && fndecl_built_in_p (old_decl))
5124 {
5125 /* Folding builtins can create multiple instructions;
5126 we need to look at all of them. */
5127 gimple_stmt_iterator i2 = gsi;
5128 gsi_prev (&i2);
5129 if (fold_stmt (&gsi))
5130 {
5131 gimple *new_stmt;
5132 /* If a builtin at the end of a bb folded into nothing,
5133 the following loop won't work. */
5134 if (gsi_end_p (gsi))
5135 {
5136 cgraph_update_edges_for_call_stmt (old_stmt,
5137 old_decl, NULL);
5138 break;
5139 }
5140 if (gsi_end_p (i2))
5141 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
5142 else
5143 gsi_next (&i2);
5144 while (1)
5145 {
5146 new_stmt = gsi_stmt (i2);
5147 update_stmt (new_stmt);
5148 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5149 new_stmt);
5150
5151 if (new_stmt == gsi_stmt (gsi))
5152 {
5153 /* It is okay to check only the very last
5154 of these statements. If it is a throwing
5155 statement nothing will change. If it isn't,
5156 this can remove EH edges. The only problematic
5157 case would be some intermediate stmts throwing
5158 but not the last one; that would mean
5159 we'd have to split the block, which we can't
5160 do here and we'd lose anyway. And as builtins
5161 probably never throw, this all
5162 is moot anyway. */
5163 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5164 new_stmt))
5165 gimple_purge_dead_eh_edges (
5166 BASIC_BLOCK_FOR_FN (cfun, first));
5167 break;
5168 }
5169 gsi_next (&i2);
5170 }
5171 }
5172 }
5173 else if (fold_stmt (&gsi))
5174 {
5175 /* Re-read the statement from GSI as fold_stmt() may
5176 have changed it. */
5177 gimple *new_stmt = gsi_stmt (gsi);
5178 update_stmt (new_stmt);
5179
5180 if (is_gimple_call (old_stmt)
5181 || is_gimple_call (new_stmt))
5182 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5183 new_stmt);
5184
5185 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5186 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
5187 first));
5188 }
5189 }
5190 }
5191 }
5192
5193 /* Expand calls to inline functions in the body of FN. */
5194
5195 unsigned int
5196 optimize_inline_calls (tree fn)
5197 {
5198 copy_body_data id;
5199 basic_block bb;
5200 int last = n_basic_blocks_for_fn (cfun);
5201 bool inlined_p = false;
5202
5203 /* Clear out ID. */
5204 memset (&id, 0, sizeof (id));
5205
5206 id.src_node = id.dst_node = cgraph_node::get (fn);
5207 gcc_assert (id.dst_node->definition);
5208 id.dst_fn = fn;
5209 /* Or any functions that aren't finished yet. */
5210 if (current_function_decl)
5211 id.dst_fn = current_function_decl;
5212
5213 id.copy_decl = copy_decl_maybe_to_var;
5214 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5215 id.transform_new_cfg = false;
5216 id.transform_return_to_modify = true;
5217 id.transform_parameter = true;
5218 id.transform_lang_insert_block = NULL;
5219 id.statements_to_fold = new hash_set<gimple *>;
5220
5221 push_gimplify_context ();
5222
5223 /* We make no attempts to keep dominance info up-to-date. */
5224 free_dominance_info (CDI_DOMINATORS);
5225 free_dominance_info (CDI_POST_DOMINATORS);
5226
5227 /* Register specific gimple functions. */
5228 gimple_register_cfg_hooks ();
5229
5230 /* Reach the trees by walking over the CFG, and note the
5231 enclosing basic-blocks in the call edges. */
5232 /* We walk the blocks going forward, because inlined function bodies
5233 will split id->current_basic_block, and the new blocks will
5234 follow it; we'll trudge through them, processing their CALL_EXPRs
5235 along the way. */
5236 FOR_EACH_BB_FN (bb, cfun)
5237 inlined_p |= gimple_expand_calls_inline (bb, &id);
5238
5239 pop_gimplify_context (NULL);
5240
5241 if (flag_checking)
5242 {
5243 struct cgraph_edge *e;
5244
5245 id.dst_node->verify ();
5246
5247 /* Double check that we inlined everything we are supposed to inline. */
5248 for (e = id.dst_node->callees; e; e = e->next_callee)
5249 gcc_assert (e->inline_failed);
5250 }
5251
5252 /* Fold queued statements. */
5253 update_max_bb_count ();
5254 fold_marked_statements (last, id.statements_to_fold);
5255 delete id.statements_to_fold;
5256
5257 gcc_assert (!id.debug_stmts.exists ());
5258
5259 /* If we didn't inline into the function there is nothing to do. */
5260 if (!inlined_p)
5261 return 0;
5262
5263 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5264 number_blocks (fn);
5265
5266 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5267
5268 if (flag_checking)
5269 id.dst_node->verify ();
5270
5271 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5272 not possible yet - the IPA passes might make various functions not
5273 throw and they don't care to proactively update local EH info.  This is
5274 done later in the fixup_cfg pass that also executes the verification. */
5275 return (TODO_update_ssa
5276 | TODO_cleanup_cfg
5277 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5278 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5279 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5280 ? TODO_rebuild_frequencies : 0));
5281 }
5282
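/* A minimal sketch (not compiled) of how a per-function transform would
   drive the function above: run it on the current function and hand the
   returned TODO flags back to the pass manager.  The surrounding pass
   boilerplate is assumed rather than shown.  */
#if 0
  unsigned int todo = optimize_inline_calls (current_function_decl);
  /* Flags such as TODO_update_ssa and TODO_cleanup_cfg are meant to be
     returned from the pass's execute/transform hook.  */
  return todo;
#endif
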
5283 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5284
5285 tree
5286 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5287 {
5288 enum tree_code code = TREE_CODE (*tp);
5289 enum tree_code_class cl = TREE_CODE_CLASS (code);
5290
5291 /* We make copies of most nodes. */
5292 if (IS_EXPR_CODE_CLASS (cl)
5293 || code == TREE_LIST
5294 || code == TREE_VEC
5295 || code == TYPE_DECL
5296 || code == OMP_CLAUSE)
5297 {
5298 /* Because the chain gets clobbered when we make a copy, we save it
5299 here. */
5300 tree chain = NULL_TREE, new_tree;
5301
5302 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5303 chain = TREE_CHAIN (*tp);
5304
5305 /* Copy the node. */
5306 new_tree = copy_node (*tp);
5307
5308 *tp = new_tree;
5309
5310 /* Now, restore the chain, if appropriate. That will cause
5311 walk_tree to walk into the chain as well. */
5312 if (code == PARM_DECL
5313 || code == TREE_LIST
5314 || code == OMP_CLAUSE)
5315 TREE_CHAIN (*tp) = chain;
5316
5317 /* For now, we don't update BLOCKs when we make copies. So, we
5318 have to nullify all BIND_EXPRs. */
5319 if (TREE_CODE (*tp) == BIND_EXPR)
5320 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5321 }
5322 else if (code == CONSTRUCTOR)
5323 {
5324 /* CONSTRUCTOR nodes need special handling because
5325 we need to duplicate the vector of elements. */
5326 tree new_tree;
5327
5328 new_tree = copy_node (*tp);
5329 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5330 *tp = new_tree;
5331 }
5332 else if (code == STATEMENT_LIST)
5333 /* We used to just abort on STATEMENT_LIST, but we can run into them
5334 with statement-expressions (c++/40975). */
5335 copy_statement_list (tp);
5336 else if (TREE_CODE_CLASS (code) == tcc_type)
5337 *walk_subtrees = 0;
5338 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5339 *walk_subtrees = 0;
5340 else if (TREE_CODE_CLASS (code) == tcc_constant)
5341 *walk_subtrees = 0;
5342 return NULL_TREE;
5343 }
5344
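/* A minimal sketch (not compiled) of using the callback above: walking a
   GENERIC expression with copy_tree_r deep-copies its expression nodes in
   place, while decls, types and constants stay shared, as documented in the
   callback.  The helper name is illustrative only.  */
#if 0
static tree
copy_generic_expr (tree expr)
{
  walk_tree (&expr, copy_tree_r, NULL, NULL);
  return expr;
}
#endif
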
5345 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5346 information indicating to what new SAVE_EXPR this one should be mapped,
5347 use that one. Otherwise, create a new node and enter it in ST. FN is
5348 the function into which the copy will be placed. */
5349
5350 static void
5351 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5352 {
5353 tree *n;
5354 tree t;
5355
5356 /* See if we already encountered this SAVE_EXPR. */
5357 n = st->get (*tp);
5358
5359 /* If we didn't already remap this SAVE_EXPR, do so now. */
5360 if (!n)
5361 {
5362 t = copy_node (*tp);
5363
5364 /* Remember this SAVE_EXPR. */
5365 st->put (*tp, t);
5366 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5367 st->put (t, t);
5368 }
5369 else
5370 {
5371 /* We've already walked into this SAVE_EXPR; don't do it again. */
5372 *walk_subtrees = 0;
5373 t = *n;
5374 }
5375
5376 /* Replace this SAVE_EXPR with the copy. */
5377 *tp = t;
5378 }
5379
5380 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
5381 label, copies the declaration and enters it in the decl map in DATA (which
5382 is really a 'copy_body_data *'). */
5383
5384 static tree
5385 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5386 bool *handled_ops_p ATTRIBUTE_UNUSED,
5387 struct walk_stmt_info *wi)
5388 {
5389 copy_body_data *id = (copy_body_data *) wi->info;
5390 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5391
5392 if (stmt)
5393 {
5394 tree decl = gimple_label_label (stmt);
5395
5396 /* Copy the decl and remember the copy. */
5397 insert_decl_map (id, decl, id->copy_decl (decl, id));
5398 }
5399
5400 return NULL_TREE;
5401 }
5402
5403 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5404 struct walk_stmt_info *wi);
5405
5406 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5407 Using the decl map pointed to by ST (which is really a
5408 'hash_map<tree, tree>'), remaps all local declarations to appropriate
5409 replacements in gimple operands. */
5410
5411 static tree
5412 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5413 {
5414 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5415 copy_body_data *id = (copy_body_data *) wi->info;
5416 hash_map<tree, tree> *st = id->decl_map;
5417 tree *n;
5418 tree expr = *tp;
5419
5420 /* For recursive invocations this is no longer the LHS itself. */
5421 bool is_lhs = wi->is_lhs;
5422 wi->is_lhs = false;
5423
5424 if (TREE_CODE (expr) == SSA_NAME)
5425 {
5426 *tp = remap_ssa_name (*tp, id);
5427 *walk_subtrees = 0;
5428 if (is_lhs)
5429 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5430 }
5431 /* Only a local declaration (variable or label). */
5432 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5433 || TREE_CODE (expr) == LABEL_DECL)
5434 {
5435 /* Lookup the declaration. */
5436 n = st->get (expr);
5437
5438 /* If it's there, remap it. */
5439 if (n)
5440 *tp = *n;
5441 *walk_subtrees = 0;
5442 }
5443 else if (TREE_CODE (expr) == STATEMENT_LIST
5444 || TREE_CODE (expr) == BIND_EXPR
5445 || TREE_CODE (expr) == SAVE_EXPR)
5446 gcc_unreachable ();
5447 else if (TREE_CODE (expr) == TARGET_EXPR)
5448 {
5449 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5450 It's OK for this to happen if it was part of a subtree that
5451 isn't immediately expanded, such as operand 2 of another
5452 TARGET_EXPR. */
5453 if (!TREE_OPERAND (expr, 1))
5454 {
5455 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5456 TREE_OPERAND (expr, 3) = NULL_TREE;
5457 }
5458 }
5459 else if (TREE_CODE (expr) == OMP_CLAUSE)
5460 {
5461 /* Before the omplower pass completes, some OMP clauses can contain
5462 sequences that are neither copied by gimple_seq_copy nor walked by
5463 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5464 in those situations, we have to copy and process them explicitly. */
5465
5466 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5467 {
5468 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5469 seq = duplicate_remap_omp_clause_seq (seq, wi);
5470 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5471 }
5472 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5473 {
5474 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5475 seq = duplicate_remap_omp_clause_seq (seq, wi);
5476 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5477 }
5478 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5479 {
5480 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5481 seq = duplicate_remap_omp_clause_seq (seq, wi);
5482 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5483 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5484 seq = duplicate_remap_omp_clause_seq (seq, wi);
5485 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5486 }
5487 }
5488
5489 /* Keep iterating. */
5490 return NULL_TREE;
5491 }
5492
5493
5494 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
5495 Using the decl map pointed to by ST (which is really a
5496 'hash_map<tree, tree>'), remaps all local declarations to appropriate
5497 replacements in gimple statements. */
5498
5499 static tree
5500 replace_locals_stmt (gimple_stmt_iterator *gsip,
5501 bool *handled_ops_p ATTRIBUTE_UNUSED,
5502 struct walk_stmt_info *wi)
5503 {
5504 copy_body_data *id = (copy_body_data *) wi->info;
5505 gimple *gs = gsi_stmt (*gsip);
5506
5507 if (gbind *stmt = dyn_cast <gbind *> (gs))
5508 {
5509 tree block = gimple_bind_block (stmt);
5510
5511 if (block)
5512 {
5513 remap_block (&block, id);
5514 gimple_bind_set_block (stmt, block);
5515 }
5516
5517 /* This will remap a lot of the same decls again, but this should be
5518 harmless. */
5519 if (gimple_bind_vars (stmt))
5520 {
5521 tree old_var, decls = gimple_bind_vars (stmt);
5522
5523 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5524 if (!can_be_nonlocal (old_var, id)
5525 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5526 remap_decl (old_var, id);
5527
5528 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5529 id->prevent_decl_creation_for_types = true;
5530 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5531 id->prevent_decl_creation_for_types = false;
5532 }
5533 }
5534
5535 /* Keep iterating. */
5536 return NULL_TREE;
5537 }
5538
5539 /* Create a copy of SEQ and remap all decls in it. */
5540
5541 static gimple_seq
5542 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5543 {
5544 if (!seq)
5545 return NULL;
5546
5547 /* Any labels in OMP sequences can only be referred to from within the
5548 sequence itself, so it is safe to do both walks right here. */
5549 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5550 gimple_seq copy = gimple_seq_copy (seq);
5551 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5552 return copy;
5553 }
5554
5555 /* Copies everything in SEQ and replaces variables and labels local to
5556 current_function_decl. */
5557
5558 gimple_seq
5559 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5560 {
5561 copy_body_data id;
5562 struct walk_stmt_info wi;
5563 gimple_seq copy;
5564
5565 /* There's nothing to do for NULL_TREE. */
5566 if (seq == NULL)
5567 return seq;
5568
5569 /* Set up ID. */
5570 memset (&id, 0, sizeof (id));
5571 id.src_fn = current_function_decl;
5572 id.dst_fn = current_function_decl;
5573 id.src_cfun = cfun;
5574 id.decl_map = new hash_map<tree, tree>;
5575 id.debug_map = NULL;
5576
5577 id.copy_decl = copy_decl_no_change;
5578 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5579 id.transform_new_cfg = false;
5580 id.transform_return_to_modify = false;
5581 id.transform_parameter = false;
5582 id.transform_lang_insert_block = NULL;
5583
5584 /* Walk the tree once to find local labels. */
5585 memset (&wi, 0, sizeof (wi));
5586 hash_set<tree> visited;
5587 wi.info = &id;
5588 wi.pset = &visited;
5589 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5590
5591 copy = gimple_seq_copy (seq);
5592
5593 /* Walk the copy, remapping decls. */
5594 memset (&wi, 0, sizeof (wi));
5595 wi.info = &id;
5596 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5597
5598 /* Clean up. */
5599 delete id.decl_map;
5600 if (id.debug_map)
5601 delete id.debug_map;
5602 if (id.dependence_map)
5603 {
5604 delete id.dependence_map;
5605 id.dependence_map = NULL;
5606 }
5607
5608 return copy;
5609 }
5610
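/* A minimal usage sketch (not compiled): duplicate a statement sequence so
   that the copy refers to fresh local variables and labels instead of
   sharing them with the original; SEQ is assumed to be a gimple_seq such as
   an OMP clause body.  */
#if 0
  gimple_seq copy = copy_gimple_seq_and_replace_locals (seq);
  /* COPY can now be modified or emitted independently of SEQ.  */
#endif
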
5611
5612 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5613
5614 static tree
5615 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5616 {
5617 if (*tp == data)
5618 return (tree) data;
5619 else
5620 return NULL;
5621 }
5622
5623 DEBUG_FUNCTION bool
5624 debug_find_tree (tree top, tree search)
5625 {
5626 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5627 }
5628
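/* Typical interactive use from the debugger, as a sketch:

     (gdb) call debug_find_tree (suspect_block, suspect_decl)

   which returns true iff the second tree occurs somewhere inside the first;
   the variable names are placeholders.  */
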
5629
5630 /* Declare the variables created by the inliner.  Add all the variables in
5631 VARS to BLOCK. */
5632
5633 static void
5634 declare_inline_vars (tree block, tree vars)
5635 {
5636 tree t;
5637 for (t = vars; t; t = DECL_CHAIN (t))
5638 {
5639 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5640 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5641 add_local_decl (cfun, t);
5642 }
5643
5644 if (block)
5645 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5646 }
5647
5648 /* Finish copying COPY, which was made from DECL on behalf of ID.  The
5649 DECL originally lived in ID->src_fn; ordinary automatic locals move into
5650 ID->dst_fn, while globals and function-scoped statics stay put. */
5651
5652 tree
5653 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5654 {
5655 /* Don't generate debug information for the copy if we wouldn't have
5656 generated it for the original either. */
5657 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5658 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5659
5660 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5661 declaration inspired this copy. */
5662 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5663
5664 /* The new variable/label has no RTL, yet. */
5665 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5666 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5667 SET_DECL_RTL (copy, 0);
5668 /* For vector typed decls make sure to update DECL_MODE according
5669 to the new function context. */
5670 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5671 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5672
5673 /* These args would always appear unused, if not for this. */
5674 TREE_USED (copy) = 1;
5675
5676 /* Set the context for the new declaration. */
5677 if (!DECL_CONTEXT (decl))
5678 /* Globals stay global. */
5679 ;
5680 else if (DECL_CONTEXT (decl) != id->src_fn)
5681 /* Things that weren't in the scope of the function we're inlining
5682 from aren't in the scope we're inlining to, either. */
5683 ;
5684 else if (TREE_STATIC (decl))
5685 /* Function-scoped static variables should stay in the original
5686 function. */
5687 ;
5688 else
5689 {
5690 /* Ordinary automatic local variables are now in the scope of the
5691 new function. */
5692 DECL_CONTEXT (copy) = id->dst_fn;
5693 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5694 {
5695 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5696 DECL_ATTRIBUTES (copy)
5697 = tree_cons (get_identifier ("omp simt private"), NULL,
5698 DECL_ATTRIBUTES (copy));
5699 id->dst_simt_vars->safe_push (copy);
5700 }
5701 }
5702
5703 return copy;
5704 }
5705
5706 static tree
5707 copy_decl_to_var (tree decl, copy_body_data *id)
5708 {
5709 tree copy, type;
5710
5711 gcc_assert (TREE_CODE (decl) == PARM_DECL
5712 || TREE_CODE (decl) == RESULT_DECL);
5713
5714 type = TREE_TYPE (decl);
5715
5716 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5717 VAR_DECL, DECL_NAME (decl), type);
5718 if (DECL_PT_UID_SET_P (decl))
5719 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5720 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5721 TREE_READONLY (copy) = TREE_READONLY (decl);
5722 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5723 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5724
5725 return copy_decl_for_dup_finish (id, decl, copy);
5726 }
5727
5728 /* Like copy_decl_to_var, but create a return slot object instead of a
5729 pointer variable for return by invisible reference. */
5730
5731 static tree
5732 copy_result_decl_to_var (tree decl, copy_body_data *id)
5733 {
5734 tree copy, type;
5735
5736 gcc_assert (TREE_CODE (decl) == PARM_DECL
5737 || TREE_CODE (decl) == RESULT_DECL);
5738
5739 type = TREE_TYPE (decl);
5740 if (DECL_BY_REFERENCE (decl))
5741 type = TREE_TYPE (type);
5742
5743 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5744 VAR_DECL, DECL_NAME (decl), type);
5745 if (DECL_PT_UID_SET_P (decl))
5746 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5747 TREE_READONLY (copy) = TREE_READONLY (decl);
5748 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5749 if (!DECL_BY_REFERENCE (decl))
5750 {
5751 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5752 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5753 }
5754
5755 return copy_decl_for_dup_finish (id, decl, copy);
5756 }
5757
5758 tree
5759 copy_decl_no_change (tree decl, copy_body_data *id)
5760 {
5761 tree copy;
5762
5763 copy = copy_node (decl);
5764
5765 /* The COPY is not abstract; it will be generated in DST_FN. */
5766 DECL_ABSTRACT_P (copy) = false;
5767 lang_hooks.dup_lang_specific_decl (copy);
5768
5769 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5770 been taken; it's for internal bookkeeping in expand_goto_internal. */
5771 if (TREE_CODE (copy) == LABEL_DECL)
5772 {
5773 TREE_ADDRESSABLE (copy) = 0;
5774 LABEL_DECL_UID (copy) = -1;
5775 }
5776
5777 return copy_decl_for_dup_finish (id, decl, copy);
5778 }
5779
5780 static tree
5781 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5782 {
5783 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5784 return copy_decl_to_var (decl, id);
5785 else
5786 return copy_decl_no_change (decl, id);
5787 }
5788
5789 /* Return a copy of the function's argument tree. */
5790 static tree
5791 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5792 bitmap args_to_skip, tree *vars)
5793 {
5794 tree arg, *parg;
5795 tree new_parm = NULL;
5796 int i = 0;
5797
5798 parg = &new_parm;
5799
5800 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5801 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5802 {
5803 tree new_tree = remap_decl (arg, id);
5804 if (TREE_CODE (new_tree) != PARM_DECL)
5805 new_tree = id->copy_decl (arg, id);
5806 lang_hooks.dup_lang_specific_decl (new_tree);
5807 *parg = new_tree;
5808 parg = &DECL_CHAIN (new_tree);
5809 }
5810 else if (!id->decl_map->get (arg))
5811 {
5812 /* Make an equivalent VAR_DECL.  If the argument was used
5813 as a temporary variable later in the function, its uses will be
5814 replaced by this local variable. */
5815 tree var = copy_decl_to_var (arg, id);
5816 insert_decl_map (id, arg, var);
5817 /* Declare this new variable. */
5818 DECL_CHAIN (var) = *vars;
5819 *vars = var;
5820 }
5821 return new_parm;
5822 }
5823
5824 /* Return a copy of the function's static chain. */
5825 static tree
5826 copy_static_chain (tree static_chain, copy_body_data * id)
5827 {
5828 tree *chain_copy, *pvar;
5829
5830 chain_copy = &static_chain;
5831 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5832 {
5833 tree new_tree = remap_decl (*pvar, id);
5834 lang_hooks.dup_lang_specific_decl (new_tree);
5835 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5836 *pvar = new_tree;
5837 }
5838 return static_chain;
5839 }
5840
5841 /* Return true if the function is allowed to be versioned.
5842 This is a guard for the versioning functionality. */
5843
5844 bool
5845 tree_versionable_function_p (tree fndecl)
5846 {
5847 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5848 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5849 }
5850
5851 /* Update clone info after duplication. */
5852
5853 static void
5854 update_clone_info (copy_body_data * id)
5855 {
5856 struct cgraph_node *node;
5857 if (!id->dst_node->clones)
5858 return;
5859 for (node = id->dst_node->clones; node != id->dst_node;)
5860 {
5861 /* First update replace maps to match the new body. */
5862 if (node->clone.tree_map)
5863 {
5864 unsigned int i;
5865 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5866 {
5867 struct ipa_replace_map *replace_info;
5868 replace_info = (*node->clone.tree_map)[i];
5869 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5870 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5871 }
5872 }
5873 if (node->clones)
5874 node = node->clones;
5875 else if (node->next_sibling_clone)
5876 node = node->next_sibling_clone;
5877 else
5878 {
5879 while (node != id->dst_node && !node->next_sibling_clone)
5880 node = node->clone_of;
5881 if (node != id->dst_node)
5882 node = node->next_sibling_clone;
5883 }
5884 }
5885 }
5886
5887 /* Create a copy of a function's tree.
5888 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5889 of the original function and the new copied function
5890 respectively. In case we want to replace a DECL
5891 tree with another tree while duplicating the function's
5892 body, TREE_MAP represents the mapping between these
5893 trees. If UPDATE_CLONES is set, the call_stmt fields
5894 of edges of clones of the function will be updated.
5895
5896 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
5897 from the new version.
5898 If SKIP_RETURN is true, the new version will return void.
5899 If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
5900 If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
5901 */
5902 void
5903 tree_function_versioning (tree old_decl, tree new_decl,
5904 vec<ipa_replace_map *, va_gc> *tree_map,
5905 bool update_clones, bitmap args_to_skip,
5906 bool skip_return, bitmap blocks_to_copy,
5907 basic_block new_entry)
5908 {
5909 struct cgraph_node *old_version_node;
5910 struct cgraph_node *new_version_node;
5911 copy_body_data id;
5912 tree p;
5913 unsigned i;
5914 struct ipa_replace_map *replace_info;
5915 basic_block old_entry_block, bb;
5916 auto_vec<gimple *, 10> init_stmts;
5917 tree vars = NULL_TREE;
5918 bitmap debug_args_to_skip = args_to_skip;
5919
5920 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5921 && TREE_CODE (new_decl) == FUNCTION_DECL);
5922 DECL_POSSIBLY_INLINED (old_decl) = 1;
5923
5924 old_version_node = cgraph_node::get (old_decl);
5925 gcc_checking_assert (old_version_node);
5926 new_version_node = cgraph_node::get (new_decl);
5927 gcc_checking_assert (new_version_node);
5928
5929 /* Copy over debug args. */
5930 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5931 {
5932 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5933 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5934 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5935 old_debug_args = decl_debug_args_lookup (old_decl);
5936 if (old_debug_args)
5937 {
5938 new_debug_args = decl_debug_args_insert (new_decl);
5939 *new_debug_args = vec_safe_copy (*old_debug_args);
5940 }
5941 }
5942
5943 /* Output the inlining info for this abstract function, since it has been
5944 inlined. If we don't do this now, we can lose the information about the
5945 variables in the function when the blocks get blown away as soon as we
5946 remove the cgraph node. */
5947 (*debug_hooks->outlining_inline_function) (old_decl);
5948
5949 DECL_ARTIFICIAL (new_decl) = 1;
5950 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5951 if (DECL_ORIGIN (old_decl) == old_decl)
5952 old_version_node->used_as_abstract_origin = true;
5953 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5954
5955 /* Prepare the data structures for the tree copy. */
5956 memset (&id, 0, sizeof (id));
5957
5958 /* Collect statements that will need folding after the copy. */
5959 id.statements_to_fold = new hash_set<gimple *>;
5960
5961 id.decl_map = new hash_map<tree, tree>;
5962 id.debug_map = NULL;
5963 id.src_fn = old_decl;
5964 id.dst_fn = new_decl;
5965 id.src_node = old_version_node;
5966 id.dst_node = new_version_node;
5967 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5968 id.blocks_to_copy = blocks_to_copy;
5969
5970 id.copy_decl = copy_decl_no_change;
5971 id.transform_call_graph_edges
5972 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5973 id.transform_new_cfg = true;
5974 id.transform_return_to_modify = false;
5975 id.transform_parameter = false;
5976 id.transform_lang_insert_block = NULL;
5977
5978 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5979 (DECL_STRUCT_FUNCTION (old_decl));
5980 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5981 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5982 initialize_cfun (new_decl, old_decl,
5983 new_entry ? new_entry->count : old_entry_block->count);
5984 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5985 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5986 = id.src_cfun->gimple_df->ipa_pta;
5987
5988 /* Copy the function's static chain. */
5989 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5990 if (p)
5991 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5992 = copy_static_chain (p, &id);
5993
5994 /* If there's a tree_map, prepare for substitution. */
5995 if (tree_map)
5996 for (i = 0; i < tree_map->length (); i++)
5997 {
5998 gimple *init;
5999 replace_info = (*tree_map)[i];
6000 if (replace_info->replace_p)
6001 {
6002 int parm_num = -1;
6003 if (!replace_info->old_tree)
6004 {
6005 int p = replace_info->parm_num;
6006 tree parm;
6007 tree req_type, new_type;
6008
6009 for (parm = DECL_ARGUMENTS (old_decl); p;
6010 parm = DECL_CHAIN (parm))
6011 p--;
6012 replace_info->old_tree = parm;
6013 parm_num = replace_info->parm_num;
6014 req_type = TREE_TYPE (parm);
6015 new_type = TREE_TYPE (replace_info->new_tree);
6016 if (!useless_type_conversion_p (req_type, new_type))
6017 {
6018 if (fold_convertible_p (req_type, replace_info->new_tree))
6019 replace_info->new_tree
6020 = fold_build1 (NOP_EXPR, req_type,
6021 replace_info->new_tree);
6022 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
6023 replace_info->new_tree
6024 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
6025 replace_info->new_tree);
6026 else
6027 {
6028 if (dump_file)
6029 {
6030 fprintf (dump_file, " const ");
6031 print_generic_expr (dump_file,
6032 replace_info->new_tree);
6033 fprintf (dump_file,
6034 " can't be converted to param ");
6035 print_generic_expr (dump_file, parm);
6036 fprintf (dump_file, "\n");
6037 }
6038 replace_info->old_tree = NULL;
6039 }
6040 }
6041 }
6042 else
6043 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
6044 if (replace_info->old_tree)
6045 {
6046 init = setup_one_parameter (&id, replace_info->old_tree,
6047 replace_info->new_tree, id.src_fn,
6048 NULL,
6049 &vars);
6050 if (init)
6051 init_stmts.safe_push (init);
6052 if (MAY_HAVE_DEBUG_BIND_STMTS && args_to_skip)
6053 {
6054 if (parm_num == -1)
6055 {
6056 tree parm;
6057 int p;
6058 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
6059 parm = DECL_CHAIN (parm), p++)
6060 if (parm == replace_info->old_tree)
6061 {
6062 parm_num = p;
6063 break;
6064 }
6065 }
6066 if (parm_num != -1)
6067 {
6068 if (debug_args_to_skip == args_to_skip)
6069 {
6070 debug_args_to_skip = BITMAP_ALLOC (NULL);
6071 bitmap_copy (debug_args_to_skip, args_to_skip);
6072 }
6073 bitmap_clear_bit (debug_args_to_skip, parm_num);
6074 }
6075 }
6076 }
6077 }
6078 }
6079 /* Copy the function's arguments. */
6080 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6081 DECL_ARGUMENTS (new_decl)
6082 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
6083 args_to_skip, &vars);
6084
6085 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6086 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6087
6088 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6089
6090 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6091 /* Add local vars. */
6092 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6093
6094 if (DECL_RESULT (old_decl) == NULL_TREE)
6095 ;
6096 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6097 {
6098 DECL_RESULT (new_decl)
6099 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6100 RESULT_DECL, NULL_TREE, void_type_node);
6101 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6102 cfun->returns_struct = 0;
6103 cfun->returns_pcc_struct = 0;
6104 }
6105 else
6106 {
6107 tree old_name;
6108 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6109 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6110 if (gimple_in_ssa_p (id.src_cfun)
6111 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6112 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6113 {
6114 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6115 insert_decl_map (&id, old_name, new_name);
6116 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6117 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6118 }
6119 }
6120
6121 /* Set up the destination function's loop tree. */
6122 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6123 {
6124 cfun->curr_properties &= ~PROP_loops;
6125 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6126 cfun->curr_properties |= PROP_loops;
6127 }
6128
6129 /* Copy the Function's body. */
6130 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6131 new_entry);
6132
6133 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6134 number_blocks (new_decl);
6135
6136 /* We want to create the BB unconditionally, so that the addition of
6137 debug stmts doesn't affect BB count, which may in the end cause
6138 codegen differences. */
6139 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6140 while (init_stmts.length ())
6141 insert_init_stmt (&id, bb, init_stmts.pop ());
6142 update_clone_info (&id);
6143
6144 /* Remap the nonlocal_goto_save_area, if any. */
6145 if (cfun->nonlocal_goto_save_area)
6146 {
6147 struct walk_stmt_info wi;
6148
6149 memset (&wi, 0, sizeof (wi));
6150 wi.info = &id;
6151 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6152 }
6153
6154 /* Clean up. */
6155 delete id.decl_map;
6156 if (id.debug_map)
6157 delete id.debug_map;
6158 free_dominance_info (CDI_DOMINATORS);
6159 free_dominance_info (CDI_POST_DOMINATORS);
6160
6161 update_max_bb_count ();
6162 fold_marked_statements (0, id.statements_to_fold);
6163 delete id.statements_to_fold;
6164 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6165 if (id.dst_node->definition)
6166 cgraph_edge::rebuild_references ();
6167 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6168 {
6169 calculate_dominance_info (CDI_DOMINATORS);
6170 fix_loop_structure (NULL);
6171 }
6172 update_ssa (TODO_update_ssa);
6173
6174 /* After partial cloning we need to rescale frequencies, so they are
6175 within proper range in the cloned function. */
6176 if (new_entry)
6177 {
6178 struct cgraph_edge *e;
6179 rebuild_frequencies ();
6180
6181 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6182 for (e = new_version_node->callees; e; e = e->next_callee)
6183 {
6184 basic_block bb = gimple_bb (e->call_stmt);
6185 e->count = bb->count;
6186 }
6187 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6188 {
6189 basic_block bb = gimple_bb (e->call_stmt);
6190 e->count = bb->count;
6191 }
6192 }
6193
6194 if (debug_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
6195 {
6196 tree parm;
6197 vec<tree, va_gc> **debug_args = NULL;
6198 unsigned int len = 0;
6199 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
6200 parm; parm = DECL_CHAIN (parm), i++)
6201 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
6202 {
6203 tree ddecl;
6204
6205 if (debug_args == NULL)
6206 {
6207 debug_args = decl_debug_args_insert (new_decl);
6208 len = vec_safe_length (*debug_args);
6209 }
6210 ddecl = make_node (DEBUG_EXPR_DECL);
6211 DECL_ARTIFICIAL (ddecl) = 1;
6212 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6213 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6214 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6215 vec_safe_push (*debug_args, ddecl);
6216 }
6217 if (debug_args != NULL)
6218 {
6219 /* On the callee side, add
6220 DEBUG D#Y s=> parm
6221 DEBUG var => D#Y
6222 stmts to the first bb where var is a VAR_DECL created for the
6223 optimized away parameter in DECL_INITIAL block. This hints
6224 in the debug info that var (whole DECL_ORIGIN is the parm
6225 PARM_DECL) is optimized away, but could be looked up at the
6226 call site as value of D#X there. */
6227 tree var = vars, vexpr;
6228 gimple_stmt_iterator cgsi
6229 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6230 gimple *def_temp;
6231 var = vars;
6232 i = vec_safe_length (*debug_args);
6233 do
6234 {
6235 i -= 2;
6236 while (var != NULL_TREE
6237 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6238 var = TREE_CHAIN (var);
6239 if (var == NULL_TREE)
6240 break;
6241 vexpr = make_node (DEBUG_EXPR_DECL);
6242 parm = (**debug_args)[i];
6243 DECL_ARTIFICIAL (vexpr) = 1;
6244 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6245 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6246 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6247 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6248 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6249 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6250 }
6251 while (i > len);
6252 }
6253 }
6254
6255 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6256 BITMAP_FREE (debug_args_to_skip);
6257 free_dominance_info (CDI_DOMINATORS);
6258 free_dominance_info (CDI_POST_DOMINATORS);
6259
6260 gcc_assert (!id.debug_stmts.exists ());
6261 pop_cfun ();
6262 return;
6263 }
6264
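/* A minimal sketch (not compiled) of creating a plain clone with the
   function above, assuming NEW_DECL is a fresh FUNCTION_DECL copied from
   OLD_DECL and already entered into the callgraph: no replacements, no
   skipped arguments, full body copy.  */
#if 0
  if (tree_versionable_function_p (old_decl))
    tree_function_versioning (old_decl, new_decl,
                              /*tree_map=*/NULL, /*update_clones=*/false,
                              /*args_to_skip=*/NULL, /*skip_return=*/false,
                              /*blocks_to_copy=*/NULL, /*new_entry=*/NULL);
#endif
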
6265 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
6266 the callee and return the inlined body on success. */
6267
6268 tree
6269 maybe_inline_call_in_expr (tree exp)
6270 {
6271 tree fn = get_callee_fndecl (exp);
6272
6273 /* We can only try to inline "const" functions. */
6274 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6275 {
6276 call_expr_arg_iterator iter;
6277 copy_body_data id;
6278 tree param, arg, t;
6279 hash_map<tree, tree> decl_map;
6280
6281 /* Remap the parameters. */
6282 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6283 param;
6284 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6285 decl_map.put (param, arg);
6286
6287 memset (&id, 0, sizeof (id));
6288 id.src_fn = fn;
6289 id.dst_fn = current_function_decl;
6290 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6291 id.decl_map = &decl_map;
6292
6293 id.copy_decl = copy_decl_no_change;
6294 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6295 id.transform_new_cfg = false;
6296 id.transform_return_to_modify = true;
6297 id.transform_parameter = true;
6298 id.transform_lang_insert_block = NULL;
6299
6300 /* Make sure not to unshare trees behind the front-end's back
6301 since front-end specific mechanisms may rely on sharing. */
6302 id.regimplify = false;
6303 id.do_not_unshare = true;
6304
6305 /* We're not inside any EH region. */
6306 id.eh_lp_nr = 0;
6307
6308 t = copy_tree_body (&id);
6309
6310 /* We can only return something suitable for use in a GENERIC
6311 expression tree. */
6312 if (TREE_CODE (t) == MODIFY_EXPR)
6313 return TREE_OPERAND (t, 1);
6314 }
6315
6316 return NULL_TREE;
6317 }
6318
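/* A minimal usage sketch (not compiled): CALL is assumed to be a GENERIC
   CALL_EXPR; on success the returned expression replaces the call.  */
#if 0
  tree inlined = maybe_inline_call_in_expr (call);
  if (inlined)
    call = inlined;
#endif
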
6319 /* Duplicate a type, fields and all. */
6320
6321 tree
6322 build_duplicate_type (tree type)
6323 {
6324 struct copy_body_data id;
6325
6326 memset (&id, 0, sizeof (id));
6327 id.src_fn = current_function_decl;
6328 id.dst_fn = current_function_decl;
6329 id.src_cfun = cfun;
6330 id.decl_map = new hash_map<tree, tree>;
6331 id.debug_map = NULL;
6332 id.copy_decl = copy_decl_no_change;
6333
6334 type = remap_type_1 (type, &id);
6335
6336 delete id.decl_map;
6337 if (id.debug_map)
6338 delete id.debug_map;
6339
6340 TYPE_CANONICAL (type) = type;
6341
6342 return type;
6343 }
6344
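/* A minimal usage sketch (not compiled): the duplicate is made its own
   canonical type, so it is distinct from the original for canonical-type
   comparisons.  */
#if 0
  tree dup = build_duplicate_type (orig_type);
  gcc_checking_assert (TYPE_CANONICAL (dup) == dup);
#endif
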
6345 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6346 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6347 evaluation. */
6348
6349 tree
6350 copy_fn (tree fn, tree& parms, tree& result)
6351 {
6352 copy_body_data id;
6353 tree param;
6354 hash_map<tree, tree> decl_map;
6355
6356 tree *p = &parms;
6357 *p = NULL_TREE;
6358
6359 memset (&id, 0, sizeof (id));
6360 id.src_fn = fn;
6361 id.dst_fn = current_function_decl;
6362 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6363 id.decl_map = &decl_map;
6364
6365 id.copy_decl = copy_decl_no_change;
6366 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6367 id.transform_new_cfg = false;
6368 id.transform_return_to_modify = false;
6369 id.transform_parameter = true;
6370 id.transform_lang_insert_block = NULL;
6371
6372 /* Make sure not to unshare trees behind the front-end's back
6373 since front-end specific mechanisms may rely on sharing. */
6374 id.regimplify = false;
6375 id.do_not_unshare = true;
6376 id.do_not_fold = true;
6377
6378 /* We're not inside any EH region. */
6379 id.eh_lp_nr = 0;
6380
6381 /* Remap the parameters and result and return them to the caller. */
6382 for (param = DECL_ARGUMENTS (fn);
6383 param;
6384 param = DECL_CHAIN (param))
6385 {
6386 *p = remap_decl (param, &id);
6387 p = &DECL_CHAIN (*p);
6388 }
6389
6390 if (DECL_RESULT (fn))
6391 result = remap_decl (DECL_RESULT (fn), &id);
6392 else
6393 result = NULL_TREE;
6394
6395 return copy_tree_body (&id);
6396 }
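/* A minimal usage sketch (not compiled), in the spirit of the C++ constexpr
   evaluation mentioned above; FNDECL stands for the function being
   evaluated.  */
#if 0
  tree parms, result;
  tree body = copy_fn (fndecl, parms, result);
  /* PARMS now chains the remapped PARM_DECLs and RESULT is the remapped
     RESULT_DECL (NULL_TREE if FNDECL has none); BODY is the unshared copy
     of DECL_SAVED_TREE (fndecl).  */
#endif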