/* Callgraph transformations to handle inlining
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The inline decisions are stored in callgraph in "inline plan" and
   applied later.

   To mark given call inline, use inline_call function.
   The function marks the edge inlinable and, if necessary, produces
   virtual clone in the callgraph representing the new copy of callee's
   function body.

   The inline plan is applied on given function body by inline_transform.  */
30
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "tree.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "function.h"
#include "cfg.h"
#include "basic-block.h"
fee8b6da
JH
50
/* Statistics kept for dumps: number of call sites inlined and number of
   offline function bodies eliminated by re-using them as inline copies.  */
int ncalls_inlined;
int nfunctions_inlined;

/* Scale counts of NODE edges by NUM/DEN.  Recurses into already-inlined
   callees so the whole inline tree rooted at NODE is rescaled
   consistently.  */

static void
update_noncloned_counts (struct cgraph_node *node,
			 profile_count num, profile_count den)
{
  struct cgraph_edge *e;

  /* Normalize the scale pair before applying it (avoids degenerate
     num/den combinations such as uninitialized or zero counts).  */
  profile_count::adjust_for_ipa_scaling (&num, &den);

  for (e = node->callees; e; e = e->next_callee)
    {
      /* !inline_failed means E was inlined into NODE; its callee's
	 subtree is part of NODE's body and must be rescaled too.  */
      if (!e->inline_failed)
	update_noncloned_counts (e->callee, num, den);
      e->count = e->count.apply_scale (num, den);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    e->count = e->count.apply_scale (num, den);
  node->count = node->count.apply_scale (num, den);
}
74
a5b1779f
JH
/* We removed or are going to remove the last call to NODE.
   Return true if we can and want proactively remove the NODE now.
   This is important to do, since we want inliner to know when offline
   copy of function was removed.  E is the edge being eliminated; an
   alias whose only caller is E itself does not block removal.  */

static bool
can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
{
  ipa_ref *ref;

  /* All aliases of NODE must themselves be removable, otherwise the
     body must stay around for them.  */
  FOR_EACH_ALIAS (node, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if ((alias->callers && alias->callers != e)
	  || !can_remove_node_now_p_1 (alias, e))
	return false;
    }
  /* FIXME: When address is taken of DECL_EXTERNAL function we still
     can remove its offline copy, but we would need to keep unanalyzed node in
     the callgraph so references can point to it.

     Also for comdat group we can ignore references inside a group as we
     want to prove the group as a whole to be dead.  */
  return (!node->address_taken
	  && node->can_remove_if_no_direct_calls_and_refs_p ()
	  /* Inlining might enable more devirtualizing, so we want to remove
	     those only after all devirtualizable virtual calls are processed.
	     Lacking may edges in callgraph we just preserve them post
	     inlining.  */
	  && (!DECL_VIRTUAL_P (node->decl)
	      || !opt_for_fn (node->decl, flag_devirtualize))
	  /* During early inlining some unanalyzed cgraph nodes might be in the
	     callgraph and they might refer to the function in question.  */
	  && !cgraph_new_nodes.exists ());
}
110
6c69a029
JH
/* We are going to eliminate last direct call to NODE (or alias of it) via edge E.
   Verify that the NODE can be removed from unit and if it is contained in comdat
   group that the whole comdat group is removable.  */

static bool
can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
{
  struct cgraph_node *next;
  if (!can_remove_node_now_p_1 (node, e))
    return false;

  /* When we see same comdat group, we need to be sure that all
     items can be removed.  */
  if (!node->same_comdat_group || !node->externally_visible)
    return true;
  /* Walk the circular same_comdat_group list; every non-alias member
     must also be removable.  */
  for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    {
      if (next->alias)
	continue;
      if ((next->callers && next->callers != e)
	  || !can_remove_node_now_p_1 (next, e))
	return false;
    }
  return true;
}
137
d83fa499
EB
/* Return true if NODE is a master clone with non-inline clones.  A clone
   with a different decl than NODE is a real (materializable) clone rather
   than an inline copy sharing NODE's decl.  */

static bool
master_clone_with_noninline_clones_p (struct cgraph_node *node)
{
  /* Only a master clone (a node that is not itself a clone of anything)
     qualifies.  */
  if (node->clone_of)
    return false;

  for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
    if (n->decl != node->decl)
      return true;

  return false;
}
fee8b6da
JH
152
/* E is expected to be an edge being inlined.  Clone destination node of
   the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating new
   clones or re-using node originally representing out-of-line function call.
   By default the offline copy is removed, when it appears dead after inlining.
   UPDATE_ORIGINAL prevents this transformation.
   If OVERALL_SIZE is non-NULL, the size is updated to reflect the
   transformation.  */

void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
		     bool update_original, int *overall_size)
{
  struct cgraph_node *inlining_into;
  struct cgraph_edge *next;

  /* The root of the inline tree E is being inlined into.  */
  if (e->caller->global.inlined_to)
    inlining_into = e->caller->global.inlined_to;
  else
    inlining_into = e->caller;

  if (duplicate)
    {
      /* We may eliminate the need for out-of-line copy to be output.
	 In that case just go ahead and re-use it.  This is not just a
	 memory optimization.  Making offline copy of function disappear
	 from the program will improve future decisions on inlining.  */
      if (!e->callee->callers->next_caller
	  /* Recursive inlining never wants the master clone to
	     be overwritten.  */
	  && update_original
	  && can_remove_node_now_p (e->callee, e)
	  /* We cannot overwrite a master clone with non-inline clones
	     until after these clones are materialized.  */
	  && !master_clone_with_noninline_clones_p (e->callee))
	{
	  /* TODO: When callee is in a comdat group, we could remove all of it,
	     including all inline clones inlined into it.  That would however
	     need small function inlining to register edge removal hook to
	     maintain the priority queue.

	     For now we keep the other functions in the group in program until
	     cgraph_remove_unreachable_functions gets rid of them.  */
	  gcc_assert (!e->callee->global.inlined_to);
	  e->callee->remove_from_same_comdat_group ();
	  if (e->callee->definition
	      && inline_account_function_p (e->callee))
	    {
	      gcc_assert (!e->callee->alias);
	      /* The offline body disappears, so subtract it from the
		 accounted unit size.  */
	      if (overall_size)
		*overall_size -= ipa_size_summaries->get (e->callee)->size;
	      nfunctions_inlined++;
	    }
	  duplicate = false;
	  e->callee->externally_visible = false;
	  /* Rescale the re-used body's profile to the count of this
	     single remaining call site.  */
	  update_noncloned_counts (e->callee, e->count, e->callee->count);

	  dump_callgraph_transformation (e->callee, inlining_into,
					 "inlining to");
	}
      else
	{
	  /* Otherwise create a fresh inline clone and redirect E to it.  */
	  struct cgraph_node *n;

	  n = e->callee->create_clone (e->callee->decl,
				       e->count,
				       update_original, vNULL, true,
				       inlining_into,
				       NULL);
	  n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
	  e->redirect_callee (n);
	}
    }
  else
    e->callee->remove_from_same_comdat_group ();

  e->callee->global.inlined_to = inlining_into;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
	clone_inlined_nodes (e, duplicate, update_original, overall_size);
    }
}
239
4517b378
MJ
/* Check all speculations in N and if any seem useless, resolve them.  When a
   first edge is resolved, pop all edges from NEW_EDGES and insert them to
   EDGE_SET.  Then remove each resolved edge from EDGE_SET, if it is there.
   Return true if any speculation was removed.  */

static bool
check_speculations_1 (cgraph_node *n, vec<cgraph_edge *> *new_edges,
		      hash_set <cgraph_edge *> *edge_set)
{
  bool speculation_removed = false;
  cgraph_edge *next;

  for (cgraph_edge *e = n->callees; e; e = next)
    {
      /* Fetch NEXT first; resolving a speculation may remove E from the
	 callee list.  */
      next = e->next_callee;
      if (e->speculative && !speculation_useful_p (e, true))
	{
	  /* Move pending new edges into EDGE_SET so a resolved edge can
	     be dropped from the set rather than left dangling in the
	     vector.  */
	  while (new_edges && !new_edges->is_empty ())
	    edge_set->add (new_edges->pop ());
	  edge_set->remove (e);

	  e->resolve_speculation (NULL);
	  speculation_removed = true;
	}
      else if (!e->inline_failed)
	/* Recurse into inlined callees.  */
	speculation_removed |= check_speculations_1 (e->callee, new_edges,
						     edge_set);
    }
  return speculation_removed;
}
269
4517b378
MJ
/* Push E to NEW_EDGES.  Called from hash_set traverse method, which
   unfortunately means this function has to have external linkage, otherwise
   the code will not compile with gcc 4.8.  Always returns true so the
   traversal visits every element.  */

bool
push_all_edges_in_set_to_vec (cgraph_edge * const &e,
			      vec<cgraph_edge *> *new_edges)
{
  new_edges->safe_push (e);
  return true;
}
281
/* Check all speculations in N and if any seem useless, resolve them and remove
   them from NEW_EDGES.  Return true if any speculation was removed.  */

static bool
check_speculations (cgraph_node *n, vec<cgraph_edge *> *new_edges)
{
  hash_set <cgraph_edge *> edge_set;
  bool res = check_speculations_1 (n, new_edges, &edge_set);
  /* Surviving edges that were parked in EDGE_SET go back into the
     NEW_EDGES vector.  */
  if (!edge_set.is_empty ())
    edge_set.traverse <vec<cgraph_edge *> *,
		       push_all_edges_in_set_to_vec> (new_edges);
  return res;
}
295
4fd94d1e
MJ
/* Mark all call graph edges coming out of NODE and all nodes that have been
   inlined to it as in_polymorphic_cdtor.  */

static void
mark_all_inlined_calls_cdtor (cgraph_node *node)
{
  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      cs->in_polymorphic_cdtor = true;
      /* Recurse into edges already inlined into NODE.  */
      if (!cs->inline_failed)
	mark_all_inlined_calls_cdtor (cs->callee);
    }
  for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    cs->in_polymorphic_cdtor = true;
}
311
fee8b6da
JH
312
/* Mark edge E as inlined and update callgraph accordingly.  UPDATE_ORIGINAL
   specify whether profile of original function should be updated.  If any new
   indirect edges are discovered in the process, add them to NEW_EDGES, unless
   it is NULL.  If UPDATE_OVERALL_SUMMARY is false, do not bother to recompute overall
   size of caller after inlining.  Caller is required to eventually do it via
   ipa_update_overall_fn_summary.
   If callee_removed is non-NULL, set it to true if we removed callee node.

   Return true iff any new callgraph edges were discovered as a
   result of inlining.  */

bool
inline_call (struct cgraph_edge *e, bool update_original,
	     vec<cgraph_edge *> *new_edges,
	     int *overall_size, bool update_overall_summary,
	     bool *callee_removed)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  struct cgraph_node *callee = e->callee->ultimate_alias_target ();
  bool new_edges_found = false;

  /* When the caller will not have its summary recomputed, remember the
     estimated growth so the size can be bumped by hand below.  */
  int estimated_growth = 0;
  if (! update_overall_summary)
    estimated_growth = estimate_edge_growth (e);
  /* This is used only for assert below.  */
#if 0
  bool predicated = inline_edge_summary (e)->predicate != NULL;
#endif

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining inline clone.  */
  gcc_assert (!callee->global.inlined_to);

  to = e->caller;
  if (to->global.inlined_to)
    to = to->global.inlined_to;
  if (to->thunk.thunk_p)
    {
      struct cgraph_node *target = to->callees->callee;
      if (in_lto_p)
	to->get_untransformed_body ();
      to->expand_thunk (false, true);
      /* When thunk is instrumented we may have multiple callees.  Find the
	 edge to the original target again after expansion.  */
      for (e = to->callees; e && e->callee != target; e = e->next_callee)
	;
      gcc_assert (e);
    }


  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (callee->decl) = true;

  /* EH personality must match between caller and inlined body.  */
  if (DECL_FUNCTION_PERSONALITY (callee->decl))
    DECL_FUNCTION_PERSONALITY (to->decl)
      = DECL_FUNCTION_PERSONALITY (callee->decl);

  bool reload_optimization_node = false;
  /* Inlining a -fno-strict-aliasing body into a -fstrict-aliasing caller
     would be unsafe; drop strict aliasing on the caller.  */
  if (!opt_for_fn (callee->decl, flag_strict_aliasing)
      && opt_for_fn (to->decl, flag_strict_aliasing))
    {
      struct gcc_options opts = global_options;

      cl_optimization_restore (&opts, opts_for_fn (to->decl));
      opts.x_flag_strict_aliasing = false;
      if (dump_file)
	fprintf (dump_file, "Dropping flag_strict_aliasing on %s\n",
		 to->dump_name ());
      DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	= build_optimization_node (&opts);
      reload_optimization_node = true;
    }

  ipa_fn_summary *caller_info = ipa_fn_summaries->get (to);
  ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
  /* If the callee contains floating point expressions and its FP-related
     flags differ from the caller's, copy the callee's (more conservative
     for its body) FP flags to the caller.  */
  if (!caller_info->fp_expressions && callee_info->fp_expressions)
    {
      caller_info->fp_expressions = true;
      if (opt_for_fn (callee->decl, flag_rounding_math)
	  != opt_for_fn (to->decl, flag_rounding_math)
	  || opt_for_fn (callee->decl, flag_trapping_math)
	  != opt_for_fn (to->decl, flag_trapping_math)
	  || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
	  != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
	  || opt_for_fn (callee->decl, flag_finite_math_only)
	  != opt_for_fn (to->decl, flag_finite_math_only)
	  || opt_for_fn (callee->decl, flag_signaling_nans)
	  != opt_for_fn (to->decl, flag_signaling_nans)
	  || opt_for_fn (callee->decl, flag_cx_limited_range)
	  != opt_for_fn (to->decl, flag_cx_limited_range)
	  || opt_for_fn (callee->decl, flag_signed_zeros)
	  != opt_for_fn (to->decl, flag_signed_zeros)
	  || opt_for_fn (callee->decl, flag_associative_math)
	  != opt_for_fn (to->decl, flag_associative_math)
	  || opt_for_fn (callee->decl, flag_reciprocal_math)
	  != opt_for_fn (to->decl, flag_reciprocal_math)
	  || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
	  != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
	  || opt_for_fn (callee->decl, flag_errno_math)
	  != opt_for_fn (to->decl, flag_errno_math))
	{
	  struct gcc_options opts = global_options;

	  cl_optimization_restore (&opts, opts_for_fn (to->decl));
	  opts.x_flag_rounding_math
	    = opt_for_fn (callee->decl, flag_rounding_math);
	  opts.x_flag_trapping_math
	    = opt_for_fn (callee->decl, flag_trapping_math);
	  opts.x_flag_unsafe_math_optimizations
	    = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
	  opts.x_flag_finite_math_only
	    = opt_for_fn (callee->decl, flag_finite_math_only);
	  opts.x_flag_signaling_nans
	    = opt_for_fn (callee->decl, flag_signaling_nans);
	  opts.x_flag_cx_limited_range
	    = opt_for_fn (callee->decl, flag_cx_limited_range);
	  opts.x_flag_signed_zeros
	    = opt_for_fn (callee->decl, flag_signed_zeros);
	  opts.x_flag_associative_math
	    = opt_for_fn (callee->decl, flag_associative_math);
	  opts.x_flag_reciprocal_math
	    = opt_for_fn (callee->decl, flag_reciprocal_math);
	  opts.x_flag_fp_int_builtin_inexact
	    = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
	  opts.x_flag_errno_math
	    = opt_for_fn (callee->decl, flag_errno_math);
	  if (dump_file)
	    fprintf (dump_file, "Copying FP flags from %s to %s\n",
		     callee->dump_name (), to->dump_name ());
	  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	    = build_optimization_node (&opts);
	  reload_optimization_node = true;
	}
    }

  /* Reload global optimization flags.  */
  if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
    set_cfun (cfun, true);

  /* If aliases are involved, redirect edge to the actual destination and
     possibly remove the aliases.  */
  if (e->callee != callee)
    {
      struct cgraph_node *alias = e->callee, *next_alias;
      e->redirect_callee (callee);
      while (alias && alias != callee)
	{
	  if (!alias->callers
	      && can_remove_node_now_p (alias,
					!e->next_caller && !e->prev_caller ? e : NULL))
	    {
	      next_alias = alias->get_alias_target ();
	      alias->remove ();
	      if (callee_removed)
		*callee_removed = true;
	      alias = next_alias;
	    }
	  else
	    break;
	}
    }

  clone_inlined_nodes (e, true, update_original, overall_size);

  gcc_assert (curr->callee->global.inlined_to == to);

  old_size = ipa_size_summaries->get (to)->size;
  ipa_merge_fn_summary_after_inlining (e);
  if (e->in_polymorphic_cdtor)
    mark_all_inlined_calls_cdtor (e->callee);
  if (opt_for_fn (e->caller->decl, optimize))
    new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
  check_speculations (e->callee, new_edges);
  if (update_overall_summary)
    ipa_update_overall_fn_summary (to);
  else
    /* Update self size by the estimate so overall function growth limits
       work for further inlining into this function.  Before inlining
       the function we inlined to again we expect the caller to update
       the overall summary.  */
    ipa_size_summaries->get (to)->size += estimated_growth;
  new_size = ipa_size_summaries->get (to)->size;

  /* Maintain TO's calls_comdat_local flag: set it if the callee makes such
     calls; clear it if the last comdat-local call was just inlined away.  */
  if (callee->calls_comdat_local)
    to->calls_comdat_local = true;
  else if (to->calls_comdat_local && callee->comdat_local_p ())
    {
      struct cgraph_edge *se = to->callees;
      for (; se; se = se->next_callee)
	if (se->inline_failed && se->callee->comdat_local_p ())
	  break;
      if (se == NULL)
	to->calls_comdat_local = false;
    }

  /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
     and revisit it after conversion to sreals in GCC 6.
     See PR 65654.  */
#if 0
  /* Verify that estimated growth match real growth.  Allow off-by-one
     error due to ipa_fn_summary::size_scale roundoff errors.  */
  gcc_assert (!update_overall_summary || !overall_size || new_edges_found
	      || abs (estimated_growth - (new_size - old_size)) <= 1
	      || speculation_removed
	      /* FIXME: a hack.  Edges with false predicate are accounted
		 wrong, we should remove them from callgraph.  */
	      || predicated);
#endif

  /* Account the change of overall unit size; external functions will be
     removed and are thus not accounted.  */
  if (overall_size && inline_account_function_p (to))
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  /* This must happen after ipa_merge_fn_summary_after_inlining that relies on jump
     functions of callee to not be updated.  */
  return new_edges_found;
}
534
535
/* Copy function body of NODE and redirect all inline clones to it.
   This is done before inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.

   Return the clone that now owns the saved body, or NULL if it turned
   out to be unnecessary and was removed.  */

static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
	     node->name ());

  gcc_assert (node == cgraph_node::get (node->decl));

  /* first_clone will be turned into real function.  */
  first_clone = node->clones;

  /* Arrange first clone to not be thunk as those do not have bodies.  */
  if (first_clone->thunk.thunk_p)
    {
      while (first_clone->thunk.thunk_p)
	first_clone = first_clone->next_sibling_clone;
      /* Unlink FIRST_CLONE from its position and splice it in at the
	 head of NODE's clone list.  */
      first_clone->prev_sibling_clone->next_sibling_clone
	= first_clone->next_sibling_clone;
      if (first_clone->next_sibling_clone)
	first_clone->next_sibling_clone->prev_sibling_clone
	  = first_clone->prev_sibling_clone;
      first_clone->next_sibling_clone = node->clones;
      first_clone->prev_sibling_clone = NULL;
      node->clones->prev_sibling_clone = first_clone;
      node->clones = first_clone;
    }
  /* Give FIRST_CLONE its own decl so it can carry the saved body.  */
  first_clone->decl = copy_node (node->decl);
  first_clone->decl->decl_with_vis.symtab_node = first_clone;
  gcc_assert (first_clone == cgraph_node::get (first_clone->decl));

  /* Now reshape the clone tree, so all other clones descends from
     first_clone.  */
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
	   n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	/* Depth-first walk over the clone tree: descend to children,
	   then siblings, then back up to an ancestor's next sibling.  */
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl,
			    NULL, NULL, true, NULL, NULL);

  /* The function will be short lived and removed after we inline all the clones,
     but make it internal so we won't confuse ourself.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  first_clone->ipa_transforms_to_apply.release ();

  /* When doing recursive inlining, the clone may become unnecessary.
     This is possible i.e. in the case when the recursive function is proved to be
     non-throwing and the recursion happens only in the EH landing pad.
     We cannot remove the clone until we are done with saving the body.
     Remove it now.  */
  if (!first_clone->callers)
    {
      first_clone->remove_symbol_and_inline_clones ();
      first_clone = NULL;
    }
  else if (flag_checking)
    first_clone->verify ();

  return first_clone;
}
644
9c8305f8
JH
/* Return true when function body of NODE still needs to be kept around
   for later re-use.  */
static bool
preserve_function_body_p (struct cgraph_node *node)
{
  gcc_assert (symtab->global_info_ready);
  gcc_assert (!node->alias && !node->thunk.thunk_p);

  /* Look if there is any non-thunk clone around.  */
  for (node = node->clones; node; node = node->next_sibling_clone)
    if (!node->thunk.thunk_p)
      return true;
  return false;
}
fee8b6da
JH
659
/* Apply inline plan to function.  Returns TODO flags for the pass
   manager.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e, *next;
  bool has_inline = false;

  /* FIXME: Currently the pass manager is adding inline transform more than
     once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (preserve_function_body_p (node))
    save_inline_function_body (node);

  /* Redirect all call statements to their final targets as decided by
     the inline plan.  */
  for (e = node->callees; e; e = next)
    {
      if (!e->inline_failed)
	has_inline = true;
      next = e->next_callee;
      e->redirect_call_stmt_to_callee ();
    }
  node->remove_all_references ();

  timevar_push (TV_INTEGRATION);
  if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
    {
      /* Scale the function body's profile so it matches the node's
	 count before expanding the inline calls.  */
      profile_count num = node->count;
      profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      bool scale = num.initialized_p () && !(num == den);
      if (scale)
	{
	  profile_count::adjust_for_ipa_scaling (&num, &den);
	  if (dump_file)
	    {
	      fprintf (dump_file, "Applying count scale ");
	      num.dump (dump_file);
	      fprintf (dump_file, "/");
	      den.dump (dump_file);
	      fprintf (dump_file, "\n");
	    }

	  basic_block bb;
	  cfun->cfg->count_max = profile_count::uninitialized ();
	  FOR_ALL_BB_FN (bb, cfun)
	    {
	      bb->count = bb->count.apply_scale (num, den);
	      cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
	    }
	  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
	}
      todo = optimize_inline_calls (current_function_decl);
    }
  timevar_pop (TV_INTEGRATION);

  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  todo |= execute_fixup_cfg ();

  if (!(todo & TODO_update_ssa_any))
    /* Redirecting edges might lead to a need for vops to be recomputed.  */
    todo |= TODO_update_ssa_only_virtuals;

  return todo;
}