]>
Commit | Line | Data |
---|---|---|
8cbc43ff | 1 | /* Callgraph transformations to handle inlining |
f1717362 | 2 | Copyright (C) 2003-2016 Free Software Foundation, Inc. |
8cbc43ff | 3 | Contributed by Jan Hubicka |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | /* The inline decisions are stored in callgraph in "inline plan" and | |
22 | applied later. | |
23 | ||
24 | To mark given call inline, use inline_call function. | |
25 | The function marks the edge inlinable and, if necessary, produces | |
26 | virtual clone in the callgraph representing the new copy of callee's | |
27 | function body. | |
28 | ||
29 | The inline plan is applied on given function body by inline_transform. */ | |
30 | ||
31 | #include "config.h" | |
32 | #include "system.h" | |
33 | #include "coretypes.h" | |
34 | #include "tm.h" | |
7c29e30e | 35 | #include "function.h" |
8cbc43ff | 36 | #include "tree.h" |
7c29e30e | 37 | #include "alloc-pool.h" |
38 | #include "tree-pass.h" | |
39 | #include "cgraph.h" | |
073c1fd5 | 40 | #include "tree-cfg.h" |
2cc80ac3 | 41 | #include "symbol-summary.h" |
8cbc43ff | 42 | #include "ipa-prop.h" |
43 | #include "ipa-inline.h" | |
44 | #include "tree-inline.h" | |
45 | ||
/* Statistics kept about the inlining process: number of inlined call
   sites and number of offline function bodies eliminated by inlining.
   Updated by inline_call and clone_inlined_nodes respectively.  */
int ncalls_inlined;
int nfunctions_inlined;
48 | ||
0835ad03 | 49 | /* Scale frequency of NODE edges by FREQ_SCALE. */ |
8cbc43ff | 50 | |
51 | static void | |
52 | update_noncloned_frequencies (struct cgraph_node *node, | |
0835ad03 | 53 | int freq_scale) |
8cbc43ff | 54 | { |
55 | struct cgraph_edge *e; | |
56 | ||
57 | /* We do not want to ignore high loop nest after freq drops to 0. */ | |
58 | if (!freq_scale) | |
59 | freq_scale = 1; | |
60 | for (e = node->callees; e; e = e->next_callee) | |
61 | { | |
8cbc43ff | 62 | e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE; |
63 | if (e->frequency > CGRAPH_FREQ_MAX) | |
64 | e->frequency = CGRAPH_FREQ_MAX; | |
65 | if (!e->inline_failed) | |
0835ad03 | 66 | update_noncloned_frequencies (e->callee, freq_scale); |
67 | } | |
68 | for (e = node->indirect_calls; e; e = e->next_callee) | |
69 | { | |
70 | e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE; | |
71 | if (e->frequency > CGRAPH_FREQ_MAX) | |
72 | e->frequency = CGRAPH_FREQ_MAX; | |
8cbc43ff | 73 | } |
74 | } | |
75 | ||
82626cb0 | 76 | /* We removed or are going to remove the last call to NODE. |
77 | Return true if we can and want proactively remove the NODE now. | |
78 | This is important to do, since we want inliner to know when offline | |
79 | copy of function was removed. */ | |
80 | ||
81 | static bool | |
7fbf53b8 | 82 | can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e) |
82626cb0 | 83 | { |
7fbf53b8 | 84 | ipa_ref *ref; |
85 | ||
86 | FOR_EACH_ALIAS (node, ref) | |
87 | { | |
88 | cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring); | |
89 | if ((alias->callers && alias->callers != e) | |
90 | || !can_remove_node_now_p_1 (alias, e)) | |
91 | return false; | |
92 | } | |
82626cb0 | 93 | /* FIXME: When address is taken of DECL_EXTERNAL function we still |
94 | can remove its offline copy, but we would need to keep unanalyzed node in | |
e88fecaf | 95 | the callgraph so references can point to it. |
96 | ||
97 | Also for comdat group we can ignore references inside a group as we | |
98 | want to prove the group as a whole to be dead. */ | |
02774f2d | 99 | return (!node->address_taken |
e88fecaf | 100 | && node->can_remove_if_no_direct_calls_and_refs_p () |
82626cb0 | 101 | /* Inlining might enable more devirtualizing, so we want to remove |
102 | those only after all devirtualizable virtual calls are processed. | |
103 | Lacking may edges in callgraph we just preserve them post | |
104 | inlining. */ | |
7fbf53b8 | 105 | && (!DECL_VIRTUAL_P (node->decl) |
106 | || !opt_for_fn (node->decl, flag_devirtualize)) | |
82626cb0 | 107 | /* During early inlining some unanalyzed cgraph nodes might be in the |
108 | callgraph and they might reffer the function in question. */ | |
347a47cb | 109 | && !cgraph_new_nodes.exists ()); |
82626cb0 | 110 | } |
111 | ||
/* We are going to eliminate last direct call to NODE (or alias of it) via edge E.
   Verify that the NODE can be removed from unit and if it is contained in comdat
   group that the whole comdat group is removable.  */

static bool
can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
{
  struct cgraph_node *next;
  /* NODE itself (and its aliases) must pass the basic removability
     checks first.  */
  if (!can_remove_node_now_p_1 (node, e))
    return false;

  /* When we see same comdat group, we need to be sure that all
     items can be removed.  */
  if (!node->same_comdat_group || !node->externally_visible)
    return true;
  /* Walk the circular same_comdat_group list, stopping when we come
     back around to NODE.  */
  for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    {
      /* Aliases of comdat members are checked recursively by
	 can_remove_node_now_p_1 on their target; skip them here.  */
      if (next->alias)
	continue;
      /* Any other member with a caller besides E, or that fails the
	 basic checks, keeps the whole group alive.  */
      if ((next->callers && next->callers != e)
	  || !can_remove_node_now_p_1 (next, e))
	return false;
    }
  return true;
}
138 | ||
0ecf4b03 | 139 | /* Return true if NODE is a master clone with non-inline clones. */ |
140 | ||
141 | static bool | |
142 | master_clone_with_noninline_clones_p (struct cgraph_node *node) | |
143 | { | |
144 | if (node->clone_of) | |
145 | return false; | |
146 | ||
147 | for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone) | |
148 | if (n->decl != node->decl) | |
149 | return true; | |
150 | ||
151 | return false; | |
152 | } | |
8cbc43ff | 153 | |
154 | /* E is expected to be an edge being inlined. Clone destination node of | |
155 | the edge and redirect it to the new clone. | |
156 | DUPLICATE is used for bookkeeping on whether we are actually creating new | |
157 | clones or re-using node originally representing out-of-line function call. | |
b8731470 | 158 | By default the offline copy is removed, when it appears dead after inlining. |
159 | UPDATE_ORIGINAL prevents this transformation. | |
160 | If OVERALL_SIZE is non-NULL, the size is updated to reflect the | |
161 | transformation. | |
162 | FREQ_SCALE specify the scaling of frequencies of call sites. */ | |
8cbc43ff | 163 | |
164 | void | |
165 | clone_inlined_nodes (struct cgraph_edge *e, bool duplicate, | |
b8731470 | 166 | bool update_original, int *overall_size, int freq_scale) |
8cbc43ff | 167 | { |
48f42a9a | 168 | struct cgraph_node *inlining_into; |
12d5ae9f | 169 | struct cgraph_edge *next; |
48f42a9a | 170 | |
171 | if (e->caller->global.inlined_to) | |
172 | inlining_into = e->caller->global.inlined_to; | |
173 | else | |
174 | inlining_into = e->caller; | |
175 | ||
8cbc43ff | 176 | if (duplicate) |
177 | { | |
178 | /* We may eliminate the need for out-of-line copy to be output. | |
179 | In that case just go ahead and re-use it. This is not just an | |
180 | memory optimization. Making offline copy of fuction disappear | |
181 | from the program will improve future decisions on inlining. */ | |
182 | if (!e->callee->callers->next_caller | |
183 | /* Recursive inlining never wants the master clone to | |
184 | be overwritten. */ | |
185 | && update_original | |
0ecf4b03 | 186 | && can_remove_node_now_p (e->callee, e) |
187 | /* We cannot overwrite a master clone with non-inline clones | |
188 | until after these clones are materialized. */ | |
189 | && !master_clone_with_noninline_clones_p (e->callee)) | |
8cbc43ff | 190 | { |
7791b0eb | 191 | /* TODO: When callee is in a comdat group, we could remove all of it, |
192 | including all inline clones inlined into it. That would however | |
193 | need small function inlining to register edge removal hook to | |
194 | maintain the priority queue. | |
195 | ||
196 | For now we keep the ohter functions in the group in program until | |
197 | cgraph_remove_unreachable_functions gets rid of them. */ | |
8cbc43ff | 198 | gcc_assert (!e->callee->global.inlined_to); |
ab89cd93 | 199 | e->callee->remove_from_same_comdat_group (); |
a6d60179 | 200 | if (e->callee->definition |
201 | && inline_account_function_p (e->callee)) | |
8cbc43ff | 202 | { |
a6d60179 | 203 | gcc_assert (!e->callee->alias); |
8cbc43ff | 204 | if (overall_size) |
b4bae7a0 | 205 | *overall_size -= inline_summaries->get (e->callee)->size; |
8cbc43ff | 206 | nfunctions_inlined++; |
207 | } | |
208 | duplicate = false; | |
02774f2d | 209 | e->callee->externally_visible = false; |
0835ad03 | 210 | update_noncloned_frequencies (e->callee, e->frequency); |
8cbc43ff | 211 | } |
212 | else | |
213 | { | |
214 | struct cgraph_node *n; | |
b8731470 | 215 | |
216 | if (freq_scale == -1) | |
217 | freq_scale = e->frequency; | |
415d1b9a | 218 | n = e->callee->create_clone (e->callee->decl, |
219 | MIN (e->count, e->callee->count), | |
220 | freq_scale, | |
221 | update_original, vNULL, true, | |
222 | inlining_into, | |
223 | NULL); | |
ca92a251 | 224 | n->used_as_abstract_origin = e->callee->used_as_abstract_origin; |
35ee1c66 | 225 | e->redirect_callee (n); |
8cbc43ff | 226 | } |
227 | } | |
cf951b1a | 228 | else |
ab89cd93 | 229 | e->callee->remove_from_same_comdat_group (); |
8cbc43ff | 230 | |
48f42a9a | 231 | e->callee->global.inlined_to = inlining_into; |
8cbc43ff | 232 | |
233 | /* Recursively clone all bodies. */ | |
12d5ae9f | 234 | for (e = e->callee->callees; e; e = next) |
235 | { | |
236 | next = e->next_callee; | |
237 | if (!e->inline_failed) | |
b8731470 | 238 | clone_inlined_nodes (e, duplicate, update_original, overall_size, freq_scale); |
7ab096e0 | 239 | } |
240 | } | |
241 | ||
242 | /* Check all speculations in N and resolve them if they seems useless. */ | |
243 | ||
244 | static bool | |
245 | check_speculations (cgraph_node *n) | |
246 | { | |
247 | bool speculation_removed = false; | |
248 | cgraph_edge *next; | |
249 | ||
250 | for (cgraph_edge *e = n->callees; e; e = next) | |
251 | { | |
252 | next = e->next_callee; | |
12d5ae9f | 253 | if (e->speculative && !speculation_useful_p (e, true)) |
254 | { | |
35ee1c66 | 255 | e->resolve_speculation (NULL); |
12d5ae9f | 256 | speculation_removed = true; |
257 | } | |
7ab096e0 | 258 | else if (!e->inline_failed) |
259 | speculation_removed |= check_speculations (e->callee); | |
12d5ae9f | 260 | } |
7ab096e0 | 261 | return speculation_removed; |
8cbc43ff | 262 | } |
263 | ||
2bec7365 | 264 | /* Mark all call graph edges coming out of NODE and all nodes that have been |
265 | inlined to it as in_polymorphic_cdtor. */ | |
266 | ||
267 | static void | |
268 | mark_all_inlined_calls_cdtor (cgraph_node *node) | |
269 | { | |
270 | for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee) | |
271 | { | |
272 | cs->in_polymorphic_cdtor = true; | |
273 | if (!cs->inline_failed) | |
274 | mark_all_inlined_calls_cdtor (cs->callee); | |
275 | } | |
276 | for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee) | |
277 | cs->in_polymorphic_cdtor = true; | |
278 | } | |
279 | ||
8cbc43ff | 280 | |
/* Mark edge E as inlined and update callgraph accordingly.  UPDATE_ORIGINAL
   specify whether profile of original function should be updated.  If any new
   indirect edges are discovered in the process, add them to NEW_EDGES, unless
   it is NULL.  If UPDATE_OVERALL_SUMMARY is false, do not bother to recompute overall
   size of caller after inlining.  Caller is required to eventually do it via
   inline_update_overall_summary.
   If callee_removed is non-NULL, set it to true if we removed callee node.

   Return true iff any new callgraph edges were discovered as a
   result of inlining.  */

bool
inline_call (struct cgraph_edge *e, bool update_original,
	     vec<cgraph_edge *> *new_edges,
	     int *overall_size, bool update_overall_summary,
	     bool *callee_removed)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  /* Resolve alias chains up front; the rest of the function works on
     the real callee body.  */
  struct cgraph_node *callee = e->callee->ultimate_alias_target ();
  bool new_edges_found = false;

  /* This is used only for assert below.  */
#if 0
  int estimated_growth = estimate_edge_growth (e);
  bool predicated = inline_edge_summary (e)->predicate != NULL;
#endif

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining inline clone.  */
  gcc_assert (!callee->global.inlined_to);

  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (callee->decl) = true;

  /* TO is the function whose body ultimately receives the inlined
     code (skip over intermediate inline clones).  */
  to = e->caller;
  if (to->global.inlined_to)
    to = to->global.inlined_to;

  /* Inlining a function with an EH personality into one without
     forces the caller to adopt the callee's personality.  */
  if (DECL_FUNCTION_PERSONALITY (callee->decl))
    DECL_FUNCTION_PERSONALITY (to->decl)
      = DECL_FUNCTION_PERSONALITY (callee->decl);
  /* If the callee was compiled with -fno-strict-aliasing, its code may
     rely on that; drop flag_strict_aliasing on the combined body.  */
  if (!opt_for_fn (callee->decl, flag_strict_aliasing)
      && opt_for_fn (to->decl, flag_strict_aliasing))
    {
      struct gcc_options opts = global_options;

      cl_optimization_restore (&opts,
	 TREE_OPTIMIZATION (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)));
      opts.x_flag_strict_aliasing = false;
      if (dump_file)
	fprintf (dump_file, "Dropping flag_strict_aliasing on %s:%i\n",
		 to->name (), to->order);
      build_optimization_node (&opts);
      DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
	 = build_optimization_node (&opts);
    }

  /* If aliases are involved, redirect edge to the actual destination and
     possibly remove the aliases.  */
  if (e->callee != callee)
    {
      struct cgraph_node *alias = e->callee, *next_alias;
      e->redirect_callee (callee);
      while (alias && alias != callee)
	{
	  /* Pass E as the dying edge only when it is the sole caller;
	     otherwise other callers still reference the alias.  */
	  if (!alias->callers
	      && can_remove_node_now_p (alias,
					!e->next_caller && !e->prev_caller ? e : NULL))
	    {
	      next_alias = alias->get_alias_target ();
	      alias->remove ();
	      if (callee_removed)
		*callee_removed = true;
	      alias = next_alias;
	    }
	  else
	    break;
	}
    }

  clone_inlined_nodes (e, true, update_original, overall_size, e->frequency);

  gcc_assert (curr->callee->global.inlined_to == to);

  old_size = inline_summaries->get (to)->size;
  inline_merge_summary (e);
  if (e->in_polymorphic_cdtor)
    mark_all_inlined_calls_cdtor (e->callee);
  if (opt_for_fn (e->caller->decl, optimize))
    new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
  check_speculations (e->callee);
  if (update_overall_summary)
    inline_update_overall_summary (to);
  new_size = inline_summaries->get (to)->size;

  /* Propagate/recompute the calls_comdat_local flag on TO: set it when
     the callee had it; clear it when the callee was the last comdat-local
     call and no other non-inlined callee is comdat local.  */
  if (callee->calls_comdat_local)
    to->calls_comdat_local = true;
  else if (to->calls_comdat_local && callee->comdat_local_p ())
    {
      struct cgraph_edge *se = to->callees;
      for (; se; se = se->next_callee)
	if (se->inline_failed && se->callee->comdat_local_p ())
	  break;
      if (se == NULL)
	to->calls_comdat_local = false;
    }

  /* FIXME: This assert suffers from roundoff errors, disable it for GCC 5
     and revisit it after conversion to sreals in GCC 6.
     See PR 65654.  */
#if 0
  /* Verify that estimated growth match real growth.  Allow off-by-one
     error due to INLINE_SIZE_SCALE roundoff errors.  */
  gcc_assert (!update_overall_summary || !overall_size || new_edges_found
	      || abs (estimated_growth - (new_size - old_size)) <= 1
	      || speculation_removed
	      /* FIXME: a hack.  Edges with false predicate are accounted
		 wrong, we should remove them from callgraph.  */
	      || predicated);
#endif

  /* Account the change of overall unit size; external functions will be
     removed and are thus not accounted.  */
  if (overall_size && inline_account_function_p (to))
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  /* This must happen after inline_merge_summary that rely on jump
     functions of callee to not be updated.  */
  return new_edges_found;
}
415 | ||
416 | ||
/* Copy function body of NODE and redirect all inline clones to it.
   This is done before inline plan is applied to NODE when there are
   still some inline clones in it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.

   Returns the clone node now holding the saved body, or NULL if the
   clone turned out to be unnecessary and was removed.  */

static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
	     node->name ());

  gcc_assert (node == cgraph_node::get (node->decl));

  /* first_clone will be turned into real function.  */
  first_clone = node->clones;
  first_clone->decl = copy_node (node->decl);
  first_clone->decl->decl_with_vis.symtab_node = first_clone;
  gcc_assert (first_clone == cgraph_node::get (first_clone->decl));

  /* Now reshape the clone tree, so all other clones descends from
     first_clone.  */
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent all remaining siblings onto first_clone...  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
	n->clone_of = first_clone;
      /* ...including the last one, which the loop above stopped at.  */
      n->clone_of = first_clone;
      /* Splice the sibling list onto first_clone's own clone list.  */
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  Non-recursive depth-first traversal using the
     clones / next_sibling_clone / clone_of links.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    /* Climb back up until a sibling exists or we reach the
	       root of the traversal.  */
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl,
			    NULL, true, NULL, false,
			    NULL, NULL);

  /* The function will be short lived and removed after we inline all the clones,
     but make it internal so we won't confuse ourself.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  first_clone->ipa_transforms_to_apply.release ();

  /* When doing recursive inlining, the clone may become unnecessary.
     This is possible i.e. in the case when the recursive function is proved to be
     non-throwing and the recursion happens only in the EH landing pad.
     We can not remove the clone until we are done with saving the body.
     Remove it now.  */
  if (!first_clone->callers)
    {
      first_clone->remove_symbol_and_inline_clones ();
      first_clone = NULL;
    }
  else if (flag_checking)
    first_clone->verify ();

  return first_clone;
}
509 | ||
da5e1e7c | 510 | /* Return true when function body of DECL still needs to be kept around |
511 | for later re-use. */ | |
cf951b1a | 512 | static bool |
da5e1e7c | 513 | preserve_function_body_p (struct cgraph_node *node) |
514 | { | |
35ee1c66 | 515 | gcc_assert (symtab->global_info_ready); |
02774f2d | 516 | gcc_assert (!node->alias && !node->thunk.thunk_p); |
da5e1e7c | 517 | |
518 | /* Look if there is any clone around. */ | |
519 | if (node->clones) | |
520 | return true; | |
521 | return false; | |
522 | } | |
8cbc43ff | 523 | |
/* Apply inline plan to function.  Returns TODO flags for the pass
   manager describing what must be updated afterwards.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e, *next;
  bool has_inline = false;

  /* FIXME: Currently the pass manager is adding inline transform more than
     once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (preserve_function_body_p (node))
    save_inline_function_body (node);

  /* Point every call statement at the callee chosen by the inline
     plan; note whether any edge was actually marked inlined.  */
  for (e = node->callees; e; e = next)
    {
      if (!e->inline_failed)
	has_inline = true;
      next = e->next_callee;
      e->redirect_call_stmt_to_callee ();
    }
  node->remove_all_references ();

  timevar_push (TV_INTEGRATION);
  if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
    todo = optimize_inline_calls (current_function_decl);
  timevar_pop (TV_INTEGRATION);

  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  todo |= execute_fixup_cfg ();

  if (!(todo & TODO_update_ssa_any))
    /* Redirecting edges might lead to a need for vops to be recomputed.  */
    todo |= TODO_update_ssa_only_virtuals;

  return todo;
}