/* Callgraph transformations to handle inlining
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The inline decisions are stored in the callgraph as an "inline plan" and
   applied later.

   To mark a given call inlined, use the inline_call function.
   The function marks the edge inlinable and, if necessary, produces a
   virtual clone in the callgraph representing the new copy of the callee's
   function body.

   The inline plan is applied to a given function body by inline_transform.  */
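
/* A minimal sketch of the intended flow (hypothetical driver code; the
   real callers live in the inliner heuristics and the pass manager):

     struct cgraph_edge *edge = ...;		// chosen call site
     if (edge->inline_failed)			// not yet inlined
       inline_call (edge, true, NULL, NULL, true, NULL);
     ...
     inline_transform (n);	// later, once per function body N

   The parameter values above are illustrative only; see the function
   comments below for their meaning.  */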

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "tree.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "ipa-inline.h"
#include "tree-inline.h"
#include "function.h"
#include "cfg.h"
#include "basic-block.h"

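/* Statistics: inline_call below bumps ncalls_inlined for every inlined
   call site, and clone_inlined_nodes bumps nfunctions_inlined whenever
   an offline function body is consumed by inlining.  */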
int ncalls_inlined;
int nfunctions_inlined;

/* Scale counts of NODE edges by NUM/DEN.  */

static void
update_noncloned_counts (struct cgraph_node *node,
			 profile_count num, profile_count den)
{
  struct cgraph_edge *e;

  profile_count::adjust_for_ipa_scaling (&num, &den);

  for (e = node->callees; e; e = e->next_callee)
    {
      if (!e->inline_failed)
	update_noncloned_counts (e->callee, num, den);
      e->count = e->count.apply_scale (num, den);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    e->count = e->count.apply_scale (num, den);
  node->count = node->count.apply_scale (num, den);
}

/* We removed or are going to remove the last call to NODE.
   Return true if we can and want to proactively remove NODE now.
   This is important to do, since we want the inliner to know when the
   offline copy of a function was removed.  */

static bool
can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
{
  ipa_ref *ref;

  FOR_EACH_ALIAS (node, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if ((alias->callers && alias->callers != e)
	  || !can_remove_node_now_p_1 (alias, e))
	return false;
    }
  /* FIXME: When the address of a DECL_EXTERNAL function is taken, we still
     can remove its offline copy, but we would need to keep an unanalyzed
     node in the callgraph so references can point to it.

     Also for a comdat group we can ignore references inside the group, as
     we want to prove the group as a whole to be dead.  */
  return (!node->address_taken
	  && node->can_remove_if_no_direct_calls_and_refs_p ()
	  /* Inlining might enable more devirtualization, so we want to
	     remove those only after all devirtualizable virtual calls are
	     processed.  Lacking may-edges in the callgraph, we just
	     preserve them post inlining.  */
	  && (!DECL_VIRTUAL_P (node->decl)
	      || !opt_for_fn (node->decl, flag_devirtualize))
	  /* During early inlining some unanalyzed cgraph nodes might be in
	     the callgraph and they might refer to the function in
	     question.  */
	  && !cgraph_new_nodes.exists ());
}

/* We are going to eliminate the last direct call to NODE (or an alias of
   it) via edge E.  Verify that NODE can be removed from the unit, and if
   it is contained in a comdat group, that the whole comdat group is
   removable.  */

static bool
can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
{
  struct cgraph_node *next;
  if (!can_remove_node_now_p_1 (node, e))
    return false;

  /* When we see same comdat group, we need to be sure that all
     items can be removed.  */
  if (!node->same_comdat_group || !node->externally_visible)
    return true;
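  /* The same_comdat_group links form a circular list, so iterate over the
     other members until we arrive back at NODE.  */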
  for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    {
      if (next->alias)
	continue;
      if ((next->callers && next->callers != e)
	  || !can_remove_node_now_p_1 (next, e))
	return false;
    }
  return true;
}

/* Return true if NODE is a master clone with non-inline clones.  */

static bool
master_clone_with_noninline_clones_p (struct cgraph_node *node)
{
  if (node->clone_of)
    return false;

  for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
    if (n->decl != node->decl)
      return true;

  return false;
}

/* E is expected to be an edge being inlined.  Clone the destination node
   of the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating
   new clones or re-using the node originally representing the out-of-line
   function call.
   By default the offline copy is removed when it appears dead after
   inlining.  UPDATE_ORIGINAL prevents this transformation.
   If OVERALL_SIZE is non-NULL, the size is updated to reflect the
   transformation.  */

void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
		     bool update_original, int *overall_size)
{
  struct cgraph_node *inlining_into;
  struct cgraph_edge *next;

  if (e->caller->global.inlined_to)
    inlining_into = e->caller->global.inlined_to;
  else
    inlining_into = e->caller;

  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
	 In that case just go ahead and re-use it.  This is not just a
	 memory optimization.  Making the offline copy of the function
	 disappear from the program will improve future decisions on
	 inlining.  */
      if (!e->callee->callers->next_caller
	  /* Recursive inlining never wants the master clone to
	     be overwritten.  */
	  && update_original
	  && can_remove_node_now_p (e->callee, e)
	  /* We cannot overwrite a master clone with non-inline clones
	     until after these clones are materialized.  */
	  && !master_clone_with_noninline_clones_p (e->callee))
	{
	  /* TODO: When the callee is in a comdat group, we could remove
	     all of it, including all inline clones inlined into it.
	     That would however need small function inlining to register
	     an edge removal hook to maintain the priority queue.

	     For now we keep the other functions in the group in the
	     program until cgraph_remove_unreachable_functions gets rid
	     of them.  */
	  gcc_assert (!e->callee->global.inlined_to);
	  e->callee->remove_from_same_comdat_group ();
	  if (e->callee->definition
	      && inline_account_function_p (e->callee))
	    {
	      gcc_assert (!e->callee->alias);
	      if (overall_size)
		*overall_size -= ipa_size_summaries->get (e->callee)->size;
	      nfunctions_inlined++;
	    }
	  duplicate = false;
	  e->callee->externally_visible = false;
	  update_noncloned_counts (e->callee, e->count, e->callee->count);

	  dump_callgraph_transformation (e->callee, inlining_into,
					 "inlining to");
	}
      else
	{
	  struct cgraph_node *n;

	  n = e->callee->create_clone (e->callee->decl,
				       e->count,
				       update_original, vNULL, true,
				       inlining_into,
				       NULL);
	  n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
	  e->redirect_callee (n);
	}
    }
  else
    e->callee->remove_from_same_comdat_group ();

  e->callee->global.inlined_to = inlining_into;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
	clone_inlined_nodes (e, duplicate, update_original, overall_size);
    }
}

/* Check all speculations in N and if any seem useless, resolve them.  When
   the first edge is resolved, pop all edges from NEW_EDGES and insert them
   into EDGE_SET.  Then remove each resolved edge from EDGE_SET, if it is
   there.  */

static bool
check_speculations_1 (cgraph_node *n, vec<cgraph_edge *> *new_edges,
		      hash_set <cgraph_edge *> *edge_set)
{
  bool speculation_removed = false;
  cgraph_edge *next;

  for (cgraph_edge *e = n->callees; e; e = next)
    {
      next = e->next_callee;
      if (e->speculative && !speculation_useful_p (e, true))
	{
	  while (new_edges && !new_edges->is_empty ())
	    edge_set->add (new_edges->pop ());
	  edge_set->remove (e);

	  e->resolve_speculation (NULL);
	  speculation_removed = true;
	}
      else if (!e->inline_failed)
	speculation_removed |= check_speculations_1 (e->callee, new_edges,
						     edge_set);
    }
  return speculation_removed;
}

/* Push E to NEW_EDGES.  Called from hash_set traverse method, which
   unfortunately means this function has to have external linkage, otherwise
   the code will not compile with gcc 4.8.  */

bool
push_all_edges_in_set_to_vec (cgraph_edge * const &e,
			      vec<cgraph_edge *> *new_edges)
{
  new_edges->safe_push (e);
  return true;
}

/* Check all speculations in N and if any seem useless, resolve them and
   remove them from NEW_EDGES.  */

static bool
check_speculations (cgraph_node *n, vec<cgraph_edge *> *new_edges)
{
  hash_set <cgraph_edge *> edge_set;
  bool res = check_speculations_1 (n, new_edges, &edge_set);
  if (!edge_set.is_empty ())
    edge_set.traverse <vec<cgraph_edge *> *,
		       push_all_edges_in_set_to_vec> (new_edges);
  return res;
}

/* Mark all call graph edges coming out of NODE and all nodes that have been
   inlined into it as in_polymorphic_cdtor.  */

static void
mark_all_inlined_calls_cdtor (cgraph_node *node)
{
  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      cs->in_polymorphic_cdtor = true;
      if (!cs->inline_failed)
	mark_all_inlined_calls_cdtor (cs->callee);
    }
  for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    cs->in_polymorphic_cdtor = true;
}


/* Mark edge E as inlined and update the callgraph accordingly.
   UPDATE_ORIGINAL specifies whether the profile of the original function
   should be updated.  If any new indirect edges are discovered in the
   process, add them to NEW_EDGES, unless it is NULL.  If
   UPDATE_OVERALL_SUMMARY is false, do not bother to recompute the overall
   size of the caller after inlining; the caller is then required to
   eventually do it via ipa_update_overall_fn_summary.
   If CALLEE_REMOVED is non-NULL, set it to true if we removed the callee
   node.

   Return true iff any new callgraph edges were discovered as a
   result of inlining.  */

bool
inline_call (struct cgraph_edge *e, bool update_original,
	     vec<cgraph_edge *> *new_edges,
	     int *overall_size, bool update_overall_summary,
	     bool *callee_removed)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  struct cgraph_node *callee = e->callee->ultimate_alias_target ();
  bool new_edges_found = false;

  int estimated_growth = 0;
  if (! update_overall_summary)
    estimated_growth = estimate_edge_growth (e);
  /* This is used only for the assert below.  */
#if 0
  bool predicated = inline_edge_summary (e)->predicate != NULL;
#endif

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining an inline clone.  */
  gcc_assert (!callee->global.inlined_to);

  to = e->caller;
  if (to->global.inlined_to)
    to = to->global.inlined_to;
  if (to->thunk.thunk_p)
    {
      struct cgraph_node *target = to->callees->callee;
      symtab->call_cgraph_removal_hooks (to);
      if (in_lto_p)
	to->get_untransformed_body ();
      to->expand_thunk (false, true);
      /* When the thunk is instrumented, we may have multiple callees.  */
      for (e = to->callees; e && e->callee != target; e = e->next_callee)
	;
      symtab->call_cgraph_insertion_hooks (to);
      gcc_assert (e);
    }


  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (callee->decl) = true;

  if (DECL_FUNCTION_PERSONALITY (callee->decl))
    DECL_FUNCTION_PERSONALITY (to->decl)
      = DECL_FUNCTION_PERSONALITY (callee->decl);

  bool reload_optimization_node = false;
  if (!opt_for_fn (callee->decl, flag_strict_aliasing)
      && opt_for_fn (to->decl, flag_strict_aliasing))
    {
      struct gcc_options opts = global_options;

      cl_optimization_restore (&opts, opts_for_fn (to->decl));
      opts.x_flag_strict_aliasing = false;
      if (dump_file)
	fprintf (dump_file, "Dropping flag_strict_aliasing on %s\n",
		 to->dump_name ());
      DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	= build_optimization_node (&opts);
      reload_optimization_node = true;
    }

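  /* If the callee contains floating point expressions but the caller does
     not, the caller can adopt the callee's FP math flags for the benefit
     of the inlined body; no FP code of the caller's own is affected by
     the change.  That is what the block below does.  */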
  ipa_fn_summary *caller_info = ipa_fn_summaries->get (to);
  ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
  if (!caller_info->fp_expressions && callee_info->fp_expressions)
    {
      caller_info->fp_expressions = true;
      if (opt_for_fn (callee->decl, flag_rounding_math)
	  != opt_for_fn (to->decl, flag_rounding_math)
	  || opt_for_fn (callee->decl, flag_trapping_math)
	  != opt_for_fn (to->decl, flag_trapping_math)
	  || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
	  != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
	  || opt_for_fn (callee->decl, flag_finite_math_only)
	  != opt_for_fn (to->decl, flag_finite_math_only)
	  || opt_for_fn (callee->decl, flag_signaling_nans)
	  != opt_for_fn (to->decl, flag_signaling_nans)
	  || opt_for_fn (callee->decl, flag_cx_limited_range)
	  != opt_for_fn (to->decl, flag_cx_limited_range)
	  || opt_for_fn (callee->decl, flag_signed_zeros)
	  != opt_for_fn (to->decl, flag_signed_zeros)
	  || opt_for_fn (callee->decl, flag_associative_math)
	  != opt_for_fn (to->decl, flag_associative_math)
	  || opt_for_fn (callee->decl, flag_reciprocal_math)
	  != opt_for_fn (to->decl, flag_reciprocal_math)
	  || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
	  != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
	  || opt_for_fn (callee->decl, flag_errno_math)
	  != opt_for_fn (to->decl, flag_errno_math))
	{
	  struct gcc_options opts = global_options;

	  cl_optimization_restore (&opts, opts_for_fn (to->decl));
	  opts.x_flag_rounding_math
	    = opt_for_fn (callee->decl, flag_rounding_math);
	  opts.x_flag_trapping_math
	    = opt_for_fn (callee->decl, flag_trapping_math);
	  opts.x_flag_unsafe_math_optimizations
	    = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
	  opts.x_flag_finite_math_only
	    = opt_for_fn (callee->decl, flag_finite_math_only);
	  opts.x_flag_signaling_nans
	    = opt_for_fn (callee->decl, flag_signaling_nans);
	  opts.x_flag_cx_limited_range
	    = opt_for_fn (callee->decl, flag_cx_limited_range);
	  opts.x_flag_signed_zeros
	    = opt_for_fn (callee->decl, flag_signed_zeros);
	  opts.x_flag_associative_math
	    = opt_for_fn (callee->decl, flag_associative_math);
	  opts.x_flag_reciprocal_math
	    = opt_for_fn (callee->decl, flag_reciprocal_math);
	  opts.x_flag_fp_int_builtin_inexact
	    = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
	  opts.x_flag_errno_math
	    = opt_for_fn (callee->decl, flag_errno_math);
	  if (dump_file)
	    fprintf (dump_file, "Copying FP flags from %s to %s\n",
		     callee->dump_name (), to->dump_name ());
	  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	    = build_optimization_node (&opts);
	  reload_optimization_node = true;
	}
    }

  /* Reload global optimization flags.  */
  if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
    set_cfun (cfun, true);

  /* If aliases are involved, redirect the edge to the actual destination
     and possibly remove the aliases.  */
  if (e->callee != callee)
    {
      struct cgraph_node *alias = e->callee, *next_alias;
      e->redirect_callee (callee);
      while (alias && alias != callee)
	{
	  if (!alias->callers
	      && can_remove_node_now_p (alias,
					!e->next_caller && !e->prev_caller ? e : NULL))
	    {
	      next_alias = alias->get_alias_target ();
	      alias->remove ();
	      if (callee_removed)
		*callee_removed = true;
	      alias = next_alias;
	    }
	  else
	    break;
	}
    }

  clone_inlined_nodes (e, true, update_original, overall_size);

  gcc_assert (curr->callee->global.inlined_to == to);

  old_size = ipa_size_summaries->get (to)->size;
  ipa_merge_fn_summary_after_inlining (e);
  if (e->in_polymorphic_cdtor)
    mark_all_inlined_calls_cdtor (e->callee);
  if (opt_for_fn (e->caller->decl, optimize))
    new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
  check_speculations (e->callee, new_edges);
  if (update_overall_summary)
    ipa_update_overall_fn_summary (to);
  else
    /* Update self size by the estimate so overall function growth limits
       work for further inlining into this function.  Before inlining
       into the function again we expect the caller to update
       the overall summary.  */
    ipa_size_summaries->get (to)->size += estimated_growth;
  new_size = ipa_size_summaries->get (to)->size;

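  /* Maintain calls_comdat_local: if the inlined body calls comdat-local
     functions, the caller now does too; conversely, if the callee itself
     was comdat-local, this may have been the caller's last such call, so
     scan the remaining not-inlined callees before clearing the flag.  */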
  if (callee->calls_comdat_local)
    to->calls_comdat_local = true;
  else if (to->calls_comdat_local && callee->comdat_local_p ())
    {
      struct cgraph_edge *se = to->callees;
      for (; se; se = se->next_callee)
	if (se->inline_failed && se->callee->comdat_local_p ())
	  break;
      if (se == NULL)
	to->calls_comdat_local = false;
    }

  /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
     and revisit it after conversion to sreals in GCC 6.
     See PR 65654.  */
#if 0
  /* Verify that estimated growth matches real growth.  Allow off-by-one
     error due to ipa_fn_summary::size_scale roundoff errors.  */
  gcc_assert (!update_overall_summary || !overall_size || new_edges_found
	      || abs (estimated_growth - (new_size - old_size)) <= 1
	      || speculation_removed
	      /* FIXME: a hack.  Edges with false predicate are accounted
		 wrongly; we should remove them from the callgraph.  */
	      || predicated);
#endif

  /* Account the change of overall unit size; external functions will be
     removed and are thus not accounted.  */
  if (overall_size && inline_account_function_p (to))
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  /* This must happen after ipa_merge_fn_summary_after_inlining, which
     relies on the jump functions of the callee not being updated.  */
  return new_edges_found;
}


/* Copy the function body of NODE and redirect all inline clones to it.
   This is done before the inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.  */

static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
	     node->name ());

  gcc_assert (node == cgraph_node::get (node->decl));

  /* first_clone will be turned into a real function.  */
  first_clone = node->clones;

  /* Arrange for the first clone not to be a thunk, as thunks do not have
     bodies.  */
  if (first_clone->thunk.thunk_p)
    {
      while (first_clone->thunk.thunk_p)
	first_clone = first_clone->next_sibling_clone;
      first_clone->prev_sibling_clone->next_sibling_clone
	= first_clone->next_sibling_clone;
      if (first_clone->next_sibling_clone)
	first_clone->next_sibling_clone->prev_sibling_clone
	  = first_clone->prev_sibling_clone;
      first_clone->next_sibling_clone = node->clones;
      first_clone->prev_sibling_clone = NULL;
      node->clones->prev_sibling_clone = first_clone;
      node->clones = first_clone;
    }
  first_clone->decl = copy_node (node->decl);
  first_clone->decl->decl_with_vis.symtab_node = first_clone;
  gcc_assert (first_clone == cgraph_node::get (first_clone->decl));

  /* Now reshape the clone tree, so all other clones descend from
     first_clone.  */
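  /* Illustration (hypothetical clone tree): if NODE had sibling clones
     C1 -> C2 -> C3 and C1 was picked as first_clone above, the code below
     re-parents C2 and C3 so that they become clones of C1.  */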
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
	   n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now the node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share their decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl,
			    NULL, NULL, true, NULL, NULL);

  /* The function will be short-lived and removed after we inline all the
     clones, but make it internal so we won't confuse ourselves.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  first_clone->ipa_transforms_to_apply.release ();

  /* When doing recursive inlining, the clone may become unnecessary.
     This is possible e.g. in the case when the recursive function is
     proved to be non-throwing and the recursion happens only in the EH
     landing pad.  We cannot remove the clone until we are done with
     saving the body.  Remove it now.  */
  if (!first_clone->callers)
    {
      first_clone->remove_symbol_and_inline_clones ();
      first_clone = NULL;
    }
  else if (flag_checking)
    first_clone->verify ();

  return first_clone;
}


/* Return true when the function body of NODE still needs to be kept
   around for later re-use.  */
static bool
preserve_function_body_p (struct cgraph_node *node)
{
  gcc_assert (symtab->global_info_ready);
  gcc_assert (!node->alias && !node->thunk.thunk_p);

  /* Look if there is any non-thunk clone around.  */
  for (node = node->clones; node; node = node->next_sibling_clone)
    if (!node->thunk.thunk_p)
      return true;
  return false;
}

/* Apply the inline plan to function.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e, *next;
  bool has_inline = false;

  /* FIXME: Currently the pass manager is adding inline transform more than
     once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (preserve_function_body_p (node))
    save_inline_function_body (node);

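  /* Redirect each call statement in the body so it matches the (possibly
     cloned) callee recorded on its callgraph edge by the inline plan.  */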
  for (e = node->callees; e; e = next)
    {
      if (!e->inline_failed)
	has_inline = true;
      next = e->next_callee;
      e->redirect_call_stmt_to_callee ();
    }
  node->remove_all_references ();

  timevar_push (TV_INTEGRATION);
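  /* The body may still carry the execution counts of the function it was
     cloned from; if so, scale all basic-block counts from the current
     entry-block count to this node's own count so the profile matches
     this particular copy.  */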
  if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
    {
      profile_count num = node->count;
      profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
      bool scale = num.initialized_p () && !(num == den);
      if (scale)
	{
	  profile_count::adjust_for_ipa_scaling (&num, &den);
	  if (dump_file)
	    {
	      fprintf (dump_file, "Applying count scale ");
	      num.dump (dump_file);
	      fprintf (dump_file, "/");
	      den.dump (dump_file);
	      fprintf (dump_file, "\n");
	    }

	  basic_block bb;
	  cfun->cfg->count_max = profile_count::uninitialized ();
	  FOR_ALL_BB_FN (bb, cfun)
	    {
	      bb->count = bb->count.apply_scale (num, den);
	      cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
	    }
	  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
	}
      todo = optimize_inline_calls (current_function_decl);
    }
  timevar_pop (TV_INTEGRATION);

  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  todo |= execute_fixup_cfg ();

  if (!(todo & TODO_update_ssa_any))
    /* Redirecting edges might lead to a need for vops to be recomputed.  */
    todo |= TODO_update_ssa_only_virtuals;

  return todo;
}