]>
Commit | Line | Data |
---|---|---|
564fe867 | 1 | /* Callgraph clones |
23a5b65a | 2 | Copyright (C) 2003-2014 Free Software Foundation, Inc. |
564fe867 JH |
3 | Contributed by Jan Hubicka |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | /* This module provide facilities for clonning functions. I.e. creating | |
22 | new functions based on existing functions with simple modifications, | |
23 | such as replacement of parameters. | |
24 | ||
25 | To allow whole program optimization without actual presence of function | |
26 | bodies, an additional infrastructure is provided for so-called virtual | |
27 | clones | |
28 | ||
29 | A virtual clone in the callgraph is a function that has no | |
30 | associated body, just a description of how to create its body based | |
31 | on a different function (which itself may be a virtual clone). | |
32 | ||
33 | The description of function modifications includes adjustments to | |
34 | the function's signature (which allows, for example, removing or | |
35 | adding function arguments), substitutions to perform on the | |
36 | function body, and, for inlined functions, a pointer to the | |
37 | function that it will be inlined into. | |
38 | ||
39 | It is also possible to redirect any edge of the callgraph from a | |
40 | function to its virtual clone. This implies updating of the call | |
41 | site to adjust for the new function signature. | |
42 | ||
43 | Most of the transformations performed by inter-procedural | |
44 | optimizations can be represented via virtual clones. For | |
45 | instance, a constant propagation pass can produce a virtual clone | |
46 | of the function which replaces one of its arguments by a | |
47 | constant. The inliner can represent its decisions by producing a | |
48 | clone of a function whose body will be later integrated into | |
49 | a given function. | |
50 | ||
51 | Using virtual clones, the program can be easily updated | |
52 | during the Execute stage, solving most of pass interactions | |
53 | problems that would otherwise occur during Transform. | |
54 | ||
55 | Virtual clones are later materialized in the LTRANS stage and | |
56 | turned into real functions. Passes executed after the virtual | |
57 | clone were introduced also perform their Transform stage | |
58 | on new functions, so for a pass there is no significant | |
59 | difference between operating on a real function or a virtual | |
60 | clone introduced before its Execute stage. | |
61 | ||
62 | Optimization passes then work on virtual clones introduced before | |
63 | their Execute stage as if they were real functions. The | |
64 | only difference is that clones are not visible during the | |
65 | Generate Summary stage. */ | |
66 | ||
67 | #include "config.h" | |
68 | #include "system.h" | |
69 | #include "coretypes.h" | |
70 | #include "tm.h" | |
d8a2d370 | 71 | #include "rtl.h" |
564fe867 | 72 | #include "tree.h" |
d8a2d370 DN |
73 | #include "stringpool.h" |
74 | #include "function.h" | |
75 | #include "emit-rtl.h" | |
2fb9a547 AM |
76 | #include "basic-block.h" |
77 | #include "tree-ssa-alias.h" | |
78 | #include "internal-fn.h" | |
79 | #include "tree-eh.h" | |
80 | #include "gimple-expr.h" | |
81 | #include "is-a.h" | |
8e9055ae | 82 | #include "gimple.h" |
442b4905 AM |
83 | #include "bitmap.h" |
84 | #include "tree-cfg.h" | |
564fe867 JH |
85 | #include "tree-inline.h" |
86 | #include "langhooks.h" | |
564fe867 JH |
87 | #include "toplev.h" |
88 | #include "flags.h" | |
564fe867 JH |
89 | #include "debug.h" |
90 | #include "target.h" | |
564fe867 | 91 | #include "diagnostic.h" |
564fe867 | 92 | #include "params.h" |
564fe867 JH |
93 | #include "intl.h" |
94 | #include "function.h" | |
95 | #include "ipa-prop.h" | |
564fe867 | 96 | #include "tree-iterator.h" |
564fe867 JH |
97 | #include "tree-dump.h" |
98 | #include "gimple-pretty-print.h" | |
564fe867 | 99 | #include "coverage.h" |
564fe867 JH |
100 | #include "ipa-inline.h" |
101 | #include "ipa-utils.h" | |
102 | #include "lto-streamer.h" | |
103 | #include "except.h" | |
104 | ||
/* Create a clone of edge E in the node N, using CALL_STMT as the new call
   statement.  STMT_UID is the LTO statement uid to assign when CALL_STMT is
   not available.  The profile count of E is scaled by COUNT_SCALE (a
   REG_BR_PROB_BASE-based probability) and its frequency by FREQ_SCALE.
   When UPDATE_ORIGINAL is true, the cloned count is subtracted from E so the
   total stays conserved.  Returns the new edge after invoking the edge
   duplication hooks on it.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
		   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = apply_probability (e->count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
	  /* When the call is speculative, we need to resolve it
	     via cgraph_resolve_speculation and not here.  */
	  && !e->speculative)
	{
	  /* The indirect call has meanwhile been devirtualized in the
	     statement; create a direct edge to the discovered callee.  */
	  struct cgraph_node *callee = cgraph_get_node (decl);
	  gcc_checking_assert (callee);
	  new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
	}
      else
	{
	  new_edge = cgraph_create_indirect_edge (n, call_stmt,
						  e->indirect_info->ecf_flags,
						  count, freq);
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
      if (e->indirect_info)
	{
	  new_edge->indirect_info
	    = ggc_cleared_alloc<cgraph_indirect_call_info> ();
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  new_edge->speculative = e->speculative;
  if (update_original)
    {
      /* Keep the total count conserved: whatever the clone accounts for
	 is removed from the original edge, clamped at zero.  */
      e->count -= new_edge->count;
      if (e->count < 0)
	e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
170 | ||
610c8ef0 MJ |
/* Build a variant of function type ORIG_TYPE, omitting the argument types
   whose (zero-based) positions are set in ARGS_TO_SKIP and replacing the
   return type with void if SKIP_RETURN is true.  The main variant is
   rebuilt eagerly and the new type is linked into its variant chain; other
   variants are handled lazily.  */

static tree
build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL, t;
  tree new_reversed;
  int i = 0;

  /* Collect the surviving argument types (in reverse order).  */
  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      /* The original list was terminated by void_list_node (prototyped);
	 re-terminate the new one the same way.  After nreverse, NEW_ARGS
	 points at the tail of NEW_REVERSED.  */
      if (new_reversed)
	TREE_CHAIN (new_args) = void_list_node;
      else
	new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (t != orig_type)
    {
      /* Recursively rebuild the main variant and splice the new type into
	 its variant list.  */
      t = build_function_type_skip_args (t, args_to_skip, skip_return);
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  return new_type;
}
238 | ||
239 | /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the | |
240 | return value if SKIP_RETURN is true. | |
241 | ||
242 | Arguments from DECL_ARGUMENTS list can't be removed now, since they are | |
243 | linked by TREE_CHAIN directly. The caller is responsible for eliminating | |
244 | them when they are being duplicated (i.e. copy_arguments_for_versioning). */ | |
245 | ||
246 | static tree | |
247 | build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip, | |
248 | bool skip_return) | |
249 | { | |
250 | tree new_decl = copy_node (orig_decl); | |
251 | tree new_type; | |
252 | ||
253 | new_type = TREE_TYPE (orig_decl); | |
254 | if (prototype_p (new_type) | |
255 | || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type)))) | |
256 | new_type | |
257 | = build_function_type_skip_args (new_type, args_to_skip, skip_return); | |
258 | TREE_TYPE (new_decl) = new_type; | |
259 | ||
260 | /* For declarations setting DECL_VINDEX (i.e. methods) | |
261 | we expect first argument to be THIS pointer. */ | |
262 | if (args_to_skip && bitmap_bit_p (args_to_skip, 0)) | |
263 | DECL_VINDEX (new_decl) = NULL_TREE; | |
264 | ||
265 | /* When signature changes, we need to clear builtin info. */ | |
266 | if (DECL_BUILT_IN (new_decl) | |
267 | && args_to_skip | |
268 | && !bitmap_empty_p (args_to_skip)) | |
269 | { | |
270 | DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN; | |
271 | DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0; | |
272 | } | |
273 | /* The FE might have information and assumptions about the other | |
274 | arguments. */ | |
275 | DECL_LANG_SPECIFIC (new_decl) = NULL; | |
276 | return new_decl; | |
277 | } | |
278 | ||
279 | /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private | |
280 | clone or its thunk. */ | |
281 | ||
282 | static void | |
283 | set_new_clone_decl_and_node_flags (cgraph_node *new_node) | |
284 | { | |
285 | DECL_EXTERNAL (new_node->decl) = 0; | |
610c8ef0 MJ |
286 | TREE_PUBLIC (new_node->decl) = 0; |
287 | DECL_COMDAT (new_node->decl) = 0; | |
288 | DECL_WEAK (new_node->decl) = 0; | |
289 | DECL_VIRTUAL_P (new_node->decl) = 0; | |
290 | DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0; | |
291 | DECL_STATIC_DESTRUCTOR (new_node->decl) = 0; | |
292 | ||
293 | new_node->externally_visible = 0; | |
294 | new_node->local.local = 1; | |
295 | new_node->lowered = true; | |
296 | } | |
297 | ||
/* Duplicate thunk THUNK if necessary, making the duplicate refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   If NODE already has a caller thunk with identical adjustment parameters,
   that thunk is reused.  Can return NODE itself when no thunk is necessary,
   which happens when THUNK is this_adjusting but the this parameter is
   being removed.  */

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node,
			  bitmap args_to_skip)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = cgraph_function_or_thunk_node (thunk->callees->callee);

  /* Thunks may chain; duplicate the rest of the chain first so NODE ends up
     being the innermost target.  */
  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node, args_to_skip);

  /* Reuse an already-created equivalent thunk for NODE if one exists.  */
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
	&& cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
	&& cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
	&& cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
	&& cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
      return cs->caller;

  tree new_decl;
  if (!args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
	 this.  */
      if (thunk->thunk.this_adjusting
	  && bitmap_bit_p (args_to_skip, 0))
	return node;

      new_decl = build_function_decl_skip_args (thunk->decl, args_to_skip,
						false);
    }
  /* The thunk decl must not carry body-related state; it is regenerated.  */
  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  new_thunk = cgraph_create_node (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;

  struct cgraph_edge *e = cgraph_create_edge (new_thunk, node, NULL, 0,
					      CGRAPH_FREQ_BASE);
  e->call_stmt_cannot_inline_p = true;
  cgraph_call_edge_duplication_hooks (thunk->callees, e);
  /* If the thunk could not be expanded to code yet, keep it as an analyzed
     symtab entry to be expanded later.  */
  if (!expand_thunk (new_thunk, false, false))
    new_thunk->analyzed = true;
  cgraph_call_node_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}
360 | ||
361 | /* If E does not lead to a thunk, simply redirect it to N. Otherwise create | |
362 | one or more equivalent thunks for N and redirect E to the first in the | |
363 | chain. */ | |
364 | ||
365 | void | |
366 | redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n, | |
367 | bitmap args_to_skip) | |
368 | { | |
369 | cgraph_node *orig_to = cgraph_function_or_thunk_node (e->callee); | |
370 | if (orig_to->thunk.thunk_p) | |
371 | n = duplicate_thunk_for_node (orig_to, n, args_to_skip); | |
372 | ||
373 | cgraph_redirect_edge_callee (e, n); | |
374 | } | |
564fe867 JH |
375 | |
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.
   The new clone will have decl set to DECL that may or may not be the same
   as decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.
   When CALL_DUPLICATION_HOOK is true, the ipa passes are notified about
   the new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in node's global.inlined_to, when invoked.  Can be NULL if the
   node is not inlined.

   Edges in REDIRECT_CALLERS are redirected to the clone, duplicating thunks
   on the way as needed; ARGS_TO_SKIP describes parameters removed from the
   clone's signature.  */

struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
		   bool update_original,
		   vec<cgraph_edge_p> redirect_callers,
		   bool call_duplication_hook,
		   struct cgraph_node *new_inlined_to,
		   bitmap args_to_skip)
{
  struct cgraph_node *new_node = cgraph_create_empty_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  new_node->decl = decl;
  symtab_register_node (new_node);
  new_node->origin = n->origin;
  new_node->lto_file_data = n->lto_file_data;
  /* Keep the nested-function chain of the origin consistent.  */
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = n->analyzed;
  new_node->definition = n->definition;
  new_node->local = n->local;
  new_node->externally_visible = false;
  new_node->local.local = true;
  new_node->global = n->global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->frequency = n->frequency;
  new_node->clone = n->clone;
  new_node->clone.tree_map = NULL;
  new_node->tp_first_run = n->tp_first_run;
  /* Compute the probability (REG_BR_PROB_BASE-based) with which edge counts
     are scaled when cloning edges below.  */
  if (n->count)
    {
      if (new_node->count > n->count)
	count_scale = REG_BR_PROB_BASE;
      else
	count_scale = GCOV_COMPUTE_SCALE (new_node->count, n->count);
    }
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
	n->count = 0;
    }

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  The only exception is when the edge was proved to
	 be unreachable during the cloning procedure.  */
      if (!e->callee
	  || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
	  || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
	redirect_edge_duplicating_thunks (e, new_node, args_to_skip);
    }

  /* Duplicate all outgoing (direct and indirect) edges with scaled
     counts/frequencies.  */
  for (e = n->callees; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, update_original);

  for (e = n->indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, update_original);
  new_node->clone_references (n);

  /* Link the clone at the head of N's list of clones.  */
  new_node->next_sibling_clone = n->clones;
  if (n->clones)
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  if (call_duplication_hook)
    cgraph_call_node_duplication_hooks (n, new_node);
  return new_node;
}
474 | ||
440a5082 | 475 | /* Return a new assembler name for a clone of DECL with SUFFIX. */ |
564fe867 JH |
476 | |
477 | static GTY(()) unsigned int clone_fn_id_num; | |
478 | ||
479 | tree | |
480 | clone_function_name (tree decl, const char *suffix) | |
481 | { | |
482 | tree name = DECL_ASSEMBLER_NAME (decl); | |
483 | size_t len = IDENTIFIER_LENGTH (name); | |
484 | char *tmp_name, *prefix; | |
485 | ||
486 | prefix = XALLOCAVEC (char, len + strlen (suffix) + 2); | |
487 | memcpy (prefix, IDENTIFIER_POINTER (name), len); | |
488 | strcpy (prefix + len + 1, suffix); | |
489 | #ifndef NO_DOT_IN_LABEL | |
490 | prefix[len] = '.'; | |
491 | #elif !defined NO_DOLLAR_IN_LABEL | |
492 | prefix[len] = '$'; | |
493 | #else | |
494 | prefix[len] = '_'; | |
495 | #endif | |
496 | ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++); | |
497 | return get_identifier (tmp_name); | |
498 | } | |
499 | ||
/* Create a callgraph node clone of OLD_NODE with a new declaration; the
   actual body will be copied later at the materialization stage.  Callers in
   REDIRECT_CALLERS are redirected to the clone; TREE_MAP lists replacements
   to perform on the body; ARGS_TO_SKIP marks parameters to drop; SUFFIX is
   appended to the clone's name.

   TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
   */
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
			     vec<cgraph_edge_p> redirect_callers,
			     vec<ipa_replace_map_p, va_gc> *tree_map,
			     bitmap args_to_skip,
			     const char * suffix)
{
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  struct ipa_replace_map *map;
  char *name;

  /* NOTE(review): versionability was presumably verified earlier in the LTO
     case, hence the assert is skipped there — confirm against callers.  */
  if (!in_lto_p)
    gcc_checking_assert (tree_versionable_function_p (old_decl));

  gcc_assert (old_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent function body and will be populated only when
     clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only clone decl instead of original.
     NOTE(review): this comment contradicts the assignment just above;
     it appears stale — confirm against history.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
				CGRAPH_FREQ_BASE, false,
				redirect_callers, false, NULL, args_to_skip);
  /* Update the properties.
     Make clone visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  /* Record address references to the replacement values so they are kept
     alive by the symbol table.  */
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    new_node->maybe_add_reference (map->new_tree, IPA_REF_ADDR, NULL);
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
  else if (old_node->clone.combined_args_to_skip)
    {
      /* OLD_NODE was itself a clone with skipped arguments; translate the
	 indices in ARGS_TO_SKIP (relative to OLD_NODE's signature) back to
	 indices of the original function and merge both bitmaps.  */
      int newi = 0, oldi = 0;
      tree arg;
      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
      struct cgraph_node *orig_node;
      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
	;
      for (arg = DECL_ARGUMENTS (orig_node->decl);
	   arg; arg = DECL_CHAIN (arg), oldi++)
	{
	  if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
	    {
	      bitmap_set_bit (new_args_to_skip, oldi);
	      continue;
	    }
	  if (bitmap_bit_p (args_to_skip, newi))
	    bitmap_set_bit (new_args_to_skip, oldi);
	  newi++;
	}
      new_node->clone.combined_args_to_skip = new_args_to_skip;
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;
  if (old_node->ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = old_node->ipa_transforms_to_apply.copy ();

  cgraph_call_node_duplication_hooks (old_node, new_node);

  return new_node;
}
607 | ||
/* NODE is being removed from the symbol table; see if its entry can be
   replaced by another inline clone sharing the same decl.  If such a clone
   exists, it is relinked into NODE's position in the clone tree and returned;
   otherwise NULL is returned.  */
struct cgraph_node *
cgraph_find_replacement_node (struct cgraph_node *node)
{
  struct cgraph_node *next_inline_clone, *replacement;

  /* Find an inline clone of NODE that shares its decl, if any.  */
  for (next_inline_clone = node->clones;
       next_inline_clone
       && next_inline_clone->decl != node->decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is inline clone of the node being removed, we need
     to put it into the position of removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      struct cgraph_node *n;
      struct cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink inline clone from the list of clones of removed node.  */
      if (next_inline_clone->next_sibling_clone)
	next_inline_clone->next_sibling_clone->prev_sibling_clone
	  = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
	{
	  gcc_assert (node->clones != next_inline_clone);
	  next_inline_clone->prev_sibling_clone->next_sibling_clone
	    = next_inline_clone->next_sibling_clone;
	}
      else
	{
	  gcc_assert (node->clones == next_inline_clone);
	  node->clones = next_inline_clone->next_sibling_clone;
	}

      new_clones = node->clones;
      node->clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = node->clone;

      /* Now place it into clone tree at same level at NODE.  */
      next_inline_clone->clone_of = node->clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (node->clone_of)
	{
	  if (node->clone_of->clones)
	    node->clone_of->clones->prev_sibling_clone = next_inline_clone;
	  next_inline_clone->next_sibling_clone = node->clone_of->clones;
	  node->clone_of->clones = next_inline_clone;
	}

      /* Merge the clone list.  NODE's remaining clones are appended to the
	 replacement's own clone list.  */
      if (new_clones)
	{
	  if (!next_inline_clone->clones)
	    next_inline_clone->clones = new_clones;
	  else
	    {
	      n = next_inline_clone->clones;
	      while (n->next_sibling_clone)
		n = n->next_sibling_clone;
	      n->next_sibling_clone = new_clones;
	      new_clones->prev_sibling_clone = n;
	    }
	}

      /* Update clone_of pointers of the reparented clones.  */
      n = new_clones;
      while (n)
	{
	  n->clone_of = next_inline_clone;
	  n = n->next_sibling_clone;
	}
      return replacement;
    }
  else
    return NULL;
}
692 | ||
/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call, and any speculation on the edge is resolved to a plain call.  */

void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
				       gimple old_stmt, gimple new_stmt,
				       bool update_speculative)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt, update_speculative);

  /* Preorder walk of the clone tree rooted at ORIG (children via ->clones,
     siblings via ->next_sibling_clone, backtracking via ->clone_of).  */
  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
	if (edge)
	  {
	    cgraph_set_call_stmt (edge, new_stmt, update_speculative);
	    /* If UPDATE_SPECULATIVE is false, it means that we are turning
	       speculative call into a real code sequence.  Update the
	       callgraph edges.  */
	    if (edge->speculative && !update_speculative)
	      {
		struct cgraph_edge *direct, *indirect;
		struct ipa_ref *ref;

		gcc_assert (!edge->indirect_unknown_callee);
		cgraph_speculative_call_info (edge, direct, indirect, ref);
		direct->speculative = false;
		indirect->speculative = false;
		ref->speculative = false;
	      }
	  }
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
746 | ||
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   same function body.  If clones already have edge for OLD_STMT; only
   update the edge same way as cgraph_set_call_stmt_including_clones does.
   The new edges target CALLEE and get profile COUNT/FREQ and failure
   REASON recorded.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
				     struct cgraph_node *callee,
				     gimple old_stmt,
				     gimple stmt, gcov_type count,
				     int freq,
				     cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  if (!cgraph_edge (orig, stmt))
    {
      edge = cgraph_create_edge (orig, callee, stmt, count, freq);
      edge->inline_failed = reason;
    }

  /* Preorder walk of the clone tree rooted at ORIG; same traversal as in
     cgraph_set_call_stmt_including_clones.  */
  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);

	/* It is possible that clones already contain the edge while
	   master didn't.  Either we promoted indirect call into direct
	   call in the clone or we are processing clones of unreachable
	   master where edges has been removed.  */
	if (edge)
	  cgraph_set_call_stmt (edge, stmt);
	else if (!cgraph_edge (node, stmt))
	  {
	    edge = cgraph_create_edge (node, callee, stmt, count,
				       freq);
	    edge->inline_failed = reason;
	  }

	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
803 | ||
804 | /* Remove the node from cgraph and all inline clones inlined into it. | |
805 | Skip however removal of FORBIDDEN_NODE and return true if it needs to be | |
806 | removed. This allows to call the function from outer loop walking clone | |
807 | tree. */ | |
808 | ||
809 | bool | |
810 | cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node) | |
811 | { | |
812 | struct cgraph_edge *e, *next; | |
813 | bool found = false; | |
814 | ||
815 | if (node == forbidden_node) | |
39f9719e JH |
816 | { |
817 | cgraph_remove_edge (node->callers); | |
818 | return true; | |
819 | } | |
564fe867 JH |
820 | for (e = node->callees; e; e = next) |
821 | { | |
822 | next = e->next_callee; | |
823 | if (!e->inline_failed) | |
824 | found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node); | |
825 | } | |
826 | cgraph_remove_node (node); | |
827 | return found; | |
828 | } | |
829 | ||
830 | /* The edges representing the callers of the NEW_VERSION node were | |
831 | fixed by cgraph_function_versioning (), now the call_expr in their | |
832 | respective tree code should be updated to call the NEW_VERSION. */ | |
833 | ||
834 | static void | |
835 | update_call_expr (struct cgraph_node *new_version) | |
836 | { | |
837 | struct cgraph_edge *e; | |
838 | ||
839 | gcc_assert (new_version); | |
840 | ||
841 | /* Update the call expr on the edges to call the new version. */ | |
842 | for (e = new_version->callers; e; e = e->next_caller) | |
843 | { | |
67348ccc DM |
844 | struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl); |
845 | gimple_call_set_fndecl (e->call_stmt, new_version->decl); | |
564fe867 JH |
846 | maybe_clean_eh_stmt_fn (inner_function, e->call_stmt); |
847 | } | |
848 | } | |
849 | ||
850 | ||
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL BBS_TO_COPY determine what basic blocks
   was copied to prevent duplications of calls that are dead
   in the clone.  */

struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 vec<cgraph_edge_p> redirect_callers,
				 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  /* Carry over analysis state and local/global info from the old version;
     the copy is never externally visible and is local iff it has a
     definition.  */
  new_version->analyzed = old_version->analyzed;
  new_version->definition = old_version->definition;
  new_version->local = old_version->local;
  new_version->externally_visible = false;
  new_version->local.local = new_version->definition;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->count = old_version->count;

  /* Clone outgoing direct edges, skipping calls whose basic block is not
     copied into the new version (dead in the clone).  */
  for (e = old_version->callees; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  /* Likewise for indirect calls.  */
  for (e = old_version->indirect_calls; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  cgraph_call_node_duplication_hooks (old_version, new_version);

  return new_version;
}
910 | ||
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BBS_TO_COPY determine what basic blocks to copy.
   If non-NULL NEW_ENTRY_BLOCK determine new entry BB of the clone.
   CLONE_NAME is the suffix appended to the new version's name.

   Return the new version's cgraph node, or NULL when OLD_DECL cannot
   be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    vec<cgraph_edge_p> redirect_callers,
			    vec<ipa_replace_map_p, va_gc> *tree_map,
			    bitmap args_to_skip,
			    bool skip_return,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Dropping parameters is only valid when the node's signature may
     change.  */
  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  A plain copy
     suffices unless parameters or the return value are being removed.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Preserve pending IPA transforms so they are applied to the clone
     as well.  */
  if (old_version_node->ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = old_version_node->ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  symtab_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
1003 | ||
/* Given virtual clone, turn it into actual clone: produce a real function
   body for NODE from its clone_of origin, applying the recorded tree map
   and args_to_skip, then unlink NODE from the clone tree.  */

static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Remember what decl this node used to be a clone of; prefer the
     origin's own former_clone_of when the origin is itself a
     materialized clone.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, false,
			    NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone: unlink NODE from its siblings and from
     its parent's clone list.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin is not itself needed (never analyzed) and has no other
     clones left, its body and edges can be released now.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      node->clone_of->remove_all_references ();
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
1042 | ||
1043 | /* Once all functions from compilation unit are in memory, produce all clones | |
1044 | and update all calls. We might also do this on demand if we don't want to | |
1045 | bring all functions to memory prior compilation, but current WHOPR | |
1046 | implementation does that and it is is bit easier to keep everything right in | |
1047 | this order. */ | |
1048 | ||
1049 | void | |
1050 | cgraph_materialize_all_clones (void) | |
1051 | { | |
1052 | struct cgraph_node *node; | |
1053 | bool stabilized = false; | |
042ae7d2 | 1054 | |
564fe867 JH |
1055 | |
1056 | if (cgraph_dump_file) | |
1057 | fprintf (cgraph_dump_file, "Materializing clones\n"); | |
1058 | #ifdef ENABLE_CHECKING | |
1059 | verify_cgraph (); | |
1060 | #endif | |
1061 | ||
1062 | /* We can also do topological order, but number of iterations should be | |
1063 | bounded by number of IPA passes since single IPA pass is probably not | |
1064 | going to create clones of clones it created itself. */ | |
1065 | while (!stabilized) | |
1066 | { | |
1067 | stabilized = true; | |
1068 | FOR_EACH_FUNCTION (node) | |
1069 | { | |
67348ccc DM |
1070 | if (node->clone_of && node->decl != node->clone_of->decl |
1071 | && !gimple_has_body_p (node->decl)) | |
564fe867 | 1072 | { |
a2e2a668 JH |
1073 | if (!node->clone_of->clone_of) |
1074 | cgraph_get_body (node->clone_of); | |
67348ccc | 1075 | if (gimple_has_body_p (node->clone_of->decl)) |
564fe867 JH |
1076 | { |
1077 | if (cgraph_dump_file) | |
1078 | { | |
1079 | fprintf (cgraph_dump_file, "cloning %s to %s\n", | |
fec39fa6 TS |
1080 | xstrdup (node->clone_of->name ()), |
1081 | xstrdup (node->name ())); | |
564fe867 JH |
1082 | if (node->clone.tree_map) |
1083 | { | |
1084 | unsigned int i; | |
1085 | fprintf (cgraph_dump_file, " replace map: "); | |
9771b263 DN |
1086 | for (i = 0; |
1087 | i < vec_safe_length (node->clone.tree_map); | |
1088 | i++) | |
564fe867 JH |
1089 | { |
1090 | struct ipa_replace_map *replace_info; | |
9771b263 | 1091 | replace_info = (*node->clone.tree_map)[i]; |
564fe867 JH |
1092 | print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0); |
1093 | fprintf (cgraph_dump_file, " -> "); | |
1094 | print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0); | |
1095 | fprintf (cgraph_dump_file, "%s%s;", | |
1096 | replace_info->replace_p ? "(replace)":"", | |
1097 | replace_info->ref_p ? "(ref)":""); | |
1098 | } | |
1099 | fprintf (cgraph_dump_file, "\n"); | |
1100 | } | |
1101 | if (node->clone.args_to_skip) | |
1102 | { | |
1103 | fprintf (cgraph_dump_file, " args_to_skip: "); | |
1104 | dump_bitmap (cgraph_dump_file, node->clone.args_to_skip); | |
1105 | } | |
1106 | if (node->clone.args_to_skip) | |
1107 | { | |
1108 | fprintf (cgraph_dump_file, " combined_args_to_skip:"); | |
1109 | dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip); | |
1110 | } | |
1111 | } | |
1112 | cgraph_materialize_clone (node); | |
1113 | stabilized = false; | |
1114 | } | |
1115 | } | |
1116 | } | |
1117 | } | |
1118 | FOR_EACH_FUNCTION (node) | |
67348ccc | 1119 | if (!node->analyzed && node->callees) |
71cafea9 JH |
1120 | { |
1121 | cgraph_node_remove_callees (node); | |
d122681a | 1122 | node->remove_all_references (); |
71cafea9 JH |
1123 | } |
1124 | else | |
d122681a | 1125 | node->clear_stmts_in_references (); |
564fe867 JH |
1126 | if (cgraph_dump_file) |
1127 | fprintf (cgraph_dump_file, "Materialization Call site updates done.\n"); | |
1128 | #ifdef ENABLE_CHECKING | |
1129 | verify_cgraph (); | |
1130 | #endif | |
04142cc3 | 1131 | symtab_remove_unreachable_nodes (false, cgraph_dump_file); |
564fe867 JH |
1132 | } |
1133 | ||
1134 | #include "gt-cgraphclones.h" |