1 /* Callgraph clones
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module provides facilities for cloning functions, i.e. creating
22 new functions based on existing functions with simple modifications,
23 such as replacement of parameters.
24 
25 To allow whole program optimization without the actual presence of function
26 bodies, an additional infrastructure is provided for so-called virtual
27 clones.
28
29 A virtual clone in the callgraph is a function that has no
30 associated body, just a description of how to create its body based
31 on a different function (which itself may be a virtual clone).
32
33 The description of function modifications includes adjustments to
34 the function's signature (which allows, for example, removing or
35 adding function arguments), substitutions to perform on the
36 function body, and, for inlined functions, a pointer to the
37 function that it will be inlined into.
38
39 It is also possible to redirect any edge of the callgraph from a
40 function to its virtual clone. This implies updating of the call
41 site to adjust for the new function signature.
42
43 Most of the transformations performed by inter-procedural
44 optimizations can be represented via virtual clones. For
45 instance, a constant propagation pass can produce a virtual clone
46 of the function which replaces one of its arguments by a
47 constant. The inliner can represent its decisions by producing a
48 clone of a function whose body will be later integrated into
49 a given function.
50
51 Using virtual clones, the program can be easily updated
52 during the Execute stage, solving most of the pass interaction
53 problems that would otherwise occur during Transform.
54 
55 Virtual clones are later materialized in the LTRANS stage and
56 turned into real functions. Passes executed after the virtual
57 clone was introduced also perform their Transform stage
58 on the new functions, so for a pass there is no significant
59 difference between operating on a real function and a virtual
60 clone introduced before its Execute stage.
61
62 Optimization passes then work on virtual clones introduced before
63 their Execute stage as if they were real functions. The
64 only difference is that clones are not visible during the
65 Generate Summary stage. */
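
/* As an illustration (a sketch, not code from this file): an IPA pass that
   wants to specialize NODE, dropping its I-th parameter and performing the
   substitutions described by an ipa_replace_map vector TREE_MAP built
   elsewhere, could create a virtual clone roughly like this; the body is
   only produced later, when the clone is materialized:

     vec<cgraph_edge *> callers = node->collect_callers ();
     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, i);
     cgraph_node *clone
       = node->create_virtual_clone (callers, tree_map, args_to_skip,
                                     "constprop");

   collect_callers and the "constprop" suffix merely mirror how existing
   passes such as IPA-CP drive this interface.  */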
66
67 #include "config.h"
68 #include "system.h"
69 #include "coretypes.h"
70 #include "backend.h"
71 #include "tree.h"
72 #include "gimple.h"
73 #include "rtl.h"
74 #include "alias.h"
75 #include "fold-const.h"
76 #include "stringpool.h"
77 #include "emit-rtl.h"
78 #include "internal-fn.h"
79 #include "tree-eh.h"
80 #include "tree-cfg.h"
81 #include "tree-inline.h"
82 #include "langhooks.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "debug.h"
86 #include "target.h"
87 #include "diagnostic.h"
88 #include "params.h"
89 #include "intl.h"
90 #include "cgraph.h"
91 #include "alloc-pool.h"
92 #include "symbol-summary.h"
93 #include "ipa-prop.h"
94 #include "tree-iterator.h"
95 #include "tree-dump.h"
96 #include "gimple-pretty-print.h"
97 #include "coverage.h"
98 #include "ipa-inline.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101 #include "except.h"
102
103 /* Create a clone of this edge in the callgraph, with node N as the caller
104 and CALL_STMT as the new call site. */
105
106 cgraph_edge *
107 cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
108 gcov_type count_scale, int freq_scale, bool update_original)
109 {
110 cgraph_edge *new_edge;
111 gcov_type gcov_count = apply_probability (count, count_scale);
112 gcov_type freq;
113
114 /* We do not want to ignore loop nest after frequency drops to 0. */
115 if (!freq_scale)
116 freq_scale = 1;
117 freq = frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
118 if (freq > CGRAPH_FREQ_MAX)
119 freq = CGRAPH_FREQ_MAX;
120
121 if (indirect_unknown_callee)
122 {
123 tree decl;
124
125 if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
126 /* When the call is speculative, we need to resolve it
127 via cgraph_resolve_speculation and not here. */
128 && !speculative)
129 {
130 cgraph_node *callee = cgraph_node::get (decl);
131 gcc_checking_assert (callee);
132 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
133 }
134 else
135 {
136 new_edge = n->create_indirect_edge (call_stmt,
137 indirect_info->ecf_flags,
138 count, freq, false);
139 *new_edge->indirect_info = *indirect_info;
140 }
141 }
142 else
143 {
144 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
145 if (indirect_info)
146 {
147 new_edge->indirect_info
148 = ggc_cleared_alloc<cgraph_indirect_call_info> ();
149 *new_edge->indirect_info = *indirect_info;
150 }
151 }
152
153 new_edge->inline_failed = inline_failed;
154 new_edge->indirect_inlining_edge = indirect_inlining_edge;
155 new_edge->lto_stmt_uid = stmt_uid;
156 /* Clone flags that depend on call_stmt availability manually. */
157 new_edge->can_throw_external = can_throw_external;
158 new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
159 new_edge->speculative = speculative;
160 new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
161 if (update_original)
162 {
163 count -= new_edge->count;
164 if (count < 0)
165 count = 0;
166 }
167 symtab->call_edge_duplication_hooks (this, new_edge);
168 return new_edge;
169 }
170
171 /* Build a variant of function type ORIG_TYPE, skipping the arguments listed
172 in ARGS_TO_SKIP and also the return value if SKIP_RETURN is true. */
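
/* For illustration only: given ORIG_TYPE int (int, double, char) and
   ARGS_TO_SKIP with bit 1 set, the result is a distinct copy with type
   int (int, char); if SKIP_RETURN is also true, the return type becomes
   void. For a METHOD_TYPE whose this argument (bit 0) is removed, a new
   FUNCTION_TYPE is built instead, as the code below does.  */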
173
174 static tree
175 build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
176 bool skip_return)
177 {
178 tree new_type = NULL;
179 tree args, new_args = NULL;
180 tree new_reversed;
181 int i = 0;
182
183 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
184 args = TREE_CHAIN (args), i++)
185 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
186 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
187
188 new_reversed = nreverse (new_args);
189 if (args)
190 {
191 if (new_reversed)
192 TREE_CHAIN (new_args) = void_list_node;
193 else
194 new_reversed = void_list_node;
195 }
196
197 /* Use copy_node to preserve as much as possible from the original type
198 (debug info, attribute lists etc.).
199 The exception is that a METHOD_TYPE must have a THIS argument;
200 when we are asked to remove it, we need to build a new FUNCTION_TYPE
201 instead. */
202 if (TREE_CODE (orig_type) != METHOD_TYPE
203 || !args_to_skip
204 || !bitmap_bit_p (args_to_skip, 0))
205 {
206 new_type = build_distinct_type_copy (orig_type);
207 TYPE_ARG_TYPES (new_type) = new_reversed;
208 }
209 else
210 {
211 new_type
212 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
213 new_reversed));
214 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
215 }
216
217 if (skip_return)
218 TREE_TYPE (new_type) = void_type_node;
219
220 return new_type;
221 }
222
223 /* Build a variant of function decl ORIG_DECL, skipping the arguments listed
224 in ARGS_TO_SKIP and also the return value if SKIP_RETURN is true.
225 
226 Arguments from the DECL_ARGUMENTS list cannot be removed now, since they are
227 linked by TREE_CHAIN directly. The caller is responsible for eliminating
228 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
229
230 static tree
231 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
232 bool skip_return)
233 {
234 tree new_decl = copy_node (orig_decl);
235 tree new_type;
236
237 new_type = TREE_TYPE (orig_decl);
238 if (prototype_p (new_type)
239 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
240 new_type
241 = build_function_type_skip_args (new_type, args_to_skip, skip_return);
242 TREE_TYPE (new_decl) = new_type;
243
244 /* For declarations setting DECL_VINDEX (i.e. methods)
245 we expect the first argument to be the THIS pointer. */
246 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
247 DECL_VINDEX (new_decl) = NULL_TREE;
248
249 /* When signature changes, we need to clear builtin info. */
250 if (DECL_BUILT_IN (new_decl)
251 && args_to_skip
252 && !bitmap_empty_p (args_to_skip))
253 {
254 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
255 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
256 }
257 /* The FE might have information and assumptions about the other
258 arguments. */
259 DECL_LANG_SPECIFIC (new_decl) = NULL;
260 return new_decl;
261 }
262
263 /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private
264 clone or its thunk. */
265
266 static void
267 set_new_clone_decl_and_node_flags (cgraph_node *new_node)
268 {
269 DECL_EXTERNAL (new_node->decl) = 0;
270 TREE_PUBLIC (new_node->decl) = 0;
271 DECL_COMDAT (new_node->decl) = 0;
272 DECL_WEAK (new_node->decl) = 0;
273 DECL_VIRTUAL_P (new_node->decl) = 0;
274 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
275 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
276
277 new_node->externally_visible = 0;
278 new_node->local.local = 1;
279 new_node->lowered = true;
280 }
281
282 /* Duplicate thunk THUNK if necessary, but make it refer to NODE. NODE's
283 clone.args_to_skip bitmap, if non-NULL, determines which parameters are
284 omitted. The function can return NODE itself if no thunk is necessary,
285 which happens when the thunk is this_adjusting and the this parameter is being removed. */
286
287 static cgraph_node *
288 duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
289 {
290 cgraph_node *new_thunk, *thunk_of;
291 thunk_of = thunk->callees->callee->ultimate_alias_target ();
292
293 if (thunk_of->thunk.thunk_p)
294 node = duplicate_thunk_for_node (thunk_of, node);
295
296 if (!DECL_ARGUMENTS (thunk->decl))
297 thunk->get_untransformed_body ();
298
299 cgraph_edge *cs;
300 for (cs = node->callers; cs; cs = cs->next_caller)
301 if (cs->caller->thunk.thunk_p
302 && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
303 && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
304 && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
305 && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
306 return cs->caller;
307
308 tree new_decl;
309 if (!node->clone.args_to_skip)
310 new_decl = copy_node (thunk->decl);
311 else
312 {
313 /* We do not need to duplicate this_adjusting thunks if we have removed
314 the this parameter. */
315 if (thunk->thunk.this_adjusting
316 && bitmap_bit_p (node->clone.args_to_skip, 0))
317 return node;
318
319 new_decl = build_function_decl_skip_args (thunk->decl,
320 node->clone.args_to_skip,
321 false);
322 }
323
324 tree *link = &DECL_ARGUMENTS (new_decl);
325 int i = 0;
326 for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
327 {
328 if (!node->clone.args_to_skip
329 || !bitmap_bit_p (node->clone.args_to_skip, i))
330 {
331 tree nd = copy_node (pd);
332 DECL_CONTEXT (nd) = new_decl;
333 *link = nd;
334 link = &DECL_CHAIN (nd);
335 }
336 }
337 *link = NULL_TREE;
338
339 gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
340 gcc_checking_assert (!DECL_INITIAL (new_decl));
341 gcc_checking_assert (!DECL_RESULT (new_decl));
342 gcc_checking_assert (!DECL_RTL_SET_P (new_decl));
343
344 DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
345 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
346
347 new_thunk = cgraph_node::create (new_decl);
348 set_new_clone_decl_and_node_flags (new_thunk);
349 new_thunk->definition = true;
350 new_thunk->thunk = thunk->thunk;
351 new_thunk->unique_name = in_lto_p;
352 new_thunk->former_clone_of = thunk->decl;
353 new_thunk->clone.args_to_skip = node->clone.args_to_skip;
354 new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
355
356 cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
357 CGRAPH_FREQ_BASE);
358 e->call_stmt_cannot_inline_p = true;
359 symtab->call_edge_duplication_hooks (thunk->callees, e);
360 symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
361 return new_thunk;
362 }
363
364 /* If E does not lead to a thunk, simply redirect it to N. Otherwise create
365 one or more equivalent thunks for N and redirect E to the first in the
366 chain. Note that it is then necessary to call
367 n->expand_all_artificial_thunks once all callers are redirected. */
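
/* The usual calling pattern (a sketch; create_clone below does essentially
   this) is to redirect all callers first and only then expand the
   artificial thunks created along the way:

     FOR_EACH_VEC_ELT (redirect_callers, i, e)
       e->redirect_callee_duplicating_thunks (new_node);
     new_node->expand_all_artificial_thunks ();  */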
368
369 void
370 cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
371 {
372 cgraph_node *orig_to = callee->ultimate_alias_target ();
373 if (orig_to->thunk.thunk_p)
374 n = duplicate_thunk_for_node (orig_to, n);
375
376 redirect_callee (n);
377 }
378
379 /* Call expand_thunk on all callers that are thunks and analyze those nodes
380 that were expanded. */
381
382 void
383 cgraph_node::expand_all_artificial_thunks ()
384 {
385 cgraph_edge *e;
386 for (e = callers; e;)
387 if (e->caller->thunk.thunk_p)
388 {
389 cgraph_node *thunk = e->caller;
390
391 e = e->next_caller;
392 if (thunk->expand_thunk (false, false))
393 {
394 thunk->thunk.thunk_p = false;
395 thunk->analyze ();
396 }
397 thunk->expand_all_artificial_thunks ();
398 }
399 else
400 e = e->next_caller;
401 }
402
403 /* Create a node representing a clone of this node executed GCOV_COUNT times.
404 Decrease the execution counts of the original node accordingly.
405 The new clone will have its decl set to NEW_DECL, which may or may not be
406 the same as the decl of this node.
407 
408 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
409 function's profile to reflect the fact that part of the execution is
410 handled by the new node.
411 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
412 the new clone. Otherwise the caller is responsible for doing so later.
413 
414 If the new node is being inlined into another one, NEW_INLINED_TO should be
415 the out-of-line function the new one is (even indirectly) inlined into. All
416 hooks will see this in the node's global.inlined_to when invoked. It can be
417 NULL if the node is not inlined. */
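
/* For example, a sketch of how the inliner (ipa-inline-transform.c) uses
   this interface to represent the body of N inlined through edge E into
   INLINING_INTO (approximate, not code from this file):

     cgraph_node *duplicate
       = n->create_clone (n->decl, e->count, e->frequency, update_original,
                          vNULL, true, inlining_into, NULL);

   i.e. the clone keeps the same decl, takes the edge's profile, redirects
   no callers and is marked as inlined into INLINING_INTO.  */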
418
419 cgraph_node *
420 cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
421 bool update_original,
422 vec<cgraph_edge *> redirect_callers,
423 bool call_duplication_hook,
424 cgraph_node *new_inlined_to,
425 bitmap args_to_skip)
426 {
427 cgraph_node *new_node = symtab->create_empty ();
428 cgraph_edge *e;
429 gcov_type count_scale;
430 unsigned i;
431
432 new_node->decl = new_decl;
433 new_node->register_symbol ();
434 new_node->origin = origin;
435 new_node->lto_file_data = lto_file_data;
436 if (new_node->origin)
437 {
438 new_node->next_nested = new_node->origin->nested;
439 new_node->origin->nested = new_node;
440 }
441 new_node->analyzed = analyzed;
442 new_node->definition = definition;
443 new_node->local = local;
444 new_node->externally_visible = false;
445 new_node->no_reorder = no_reorder;
446 new_node->local.local = true;
447 new_node->global = global;
448 new_node->global.inlined_to = new_inlined_to;
449 new_node->rtl = rtl;
450 new_node->count = count;
451 new_node->frequency = frequency;
452 new_node->tp_first_run = tp_first_run;
453 new_node->tm_clone = tm_clone;
454 new_node->icf_merged = icf_merged;
455 new_node->merged = merged;
456
457 new_node->clone.tree_map = NULL;
458 new_node->clone.args_to_skip = args_to_skip;
459 new_node->split_part = split_part;
460 if (!args_to_skip)
461 new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
462 else if (clone.combined_args_to_skip)
463 {
464 new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
465 bitmap_ior (new_node->clone.combined_args_to_skip,
466 clone.combined_args_to_skip, args_to_skip);
467 }
468 else
469 new_node->clone.combined_args_to_skip = args_to_skip;
470
471 if (count)
472 {
473 if (new_node->count > count)
474 count_scale = REG_BR_PROB_BASE;
475 else
476 count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
477 }
478 else
479 count_scale = 0;
480 if (update_original)
481 {
482 count -= gcov_count;
483 if (count < 0)
484 count = 0;
485 }
486
487 FOR_EACH_VEC_ELT (redirect_callers, i, e)
488 {
489 /* Redirect calls to the old version node to point to its new
490 version. The only exception is when the edge was proved to
491 be unreachable during the cloning procedure. */
492 if (!e->callee
493 || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
494 || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
495 e->redirect_callee_duplicating_thunks (new_node);
496 }
497 new_node->expand_all_artificial_thunks ();
498
499 for (e = callees; e; e = e->next_callee)
500 e->clone (new_node, e->call_stmt, e->lto_stmt_uid, count_scale,
501 freq, update_original);
502
503 for (e = indirect_calls; e; e = e->next_callee)
504 e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
505 count_scale, freq, update_original);
506 new_node->clone_references (this);
507
508 new_node->next_sibling_clone = clones;
509 if (clones)
510 clones->prev_sibling_clone = new_node;
511 clones = new_node;
512 new_node->clone_of = this;
513
514 if (call_duplication_hook)
515 symtab->call_cgraph_duplication_hooks (this, new_node);
516 return new_node;
517 }
518
519 static GTY(()) unsigned int clone_fn_id_num;
520
521 /* Return a new assembler name for a clone of a decl named NAME, using
522 SUFFIX. */
523
524 tree
525 clone_function_name_1 (const char *name, const char *suffix)
526 {
527 size_t len = strlen (name);
528 char *tmp_name, *prefix;
529
530 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
531 memcpy (prefix, name, len);
532 strcpy (prefix + len + 1, suffix);
533 #ifndef NO_DOT_IN_LABEL
534 prefix[len] = '.';
535 #elif !defined NO_DOLLAR_IN_LABEL
536 prefix[len] = '$';
537 #else
538 prefix[len] = '_';
539 #endif
540 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
541 return get_identifier (tmp_name);
542 }
543
544 /* Return a new assembler name for a clone of DECL with SUFFIX. */
545
546 tree
547 clone_function_name (tree decl, const char *suffix)
548 {
549 tree name = DECL_ASSEMBLER_NAME (decl);
550 return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
551 }
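
/* For illustration (the exact spelling is target dependent): on a typical
   ELF target, where NO_DOT_IN_LABEL is not defined, clone_function_name
   (decl, "constprop") for a function whose assembler name is "foo" yields
   names such as "foo.constprop.0", "foo.constprop.1", ..., the trailing
   number coming from the clone_fn_id_num counter above.  */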
552
553
554 /* Create a callgraph node clone with a new declaration. The actual body
555 will be copied later, at the compilation stage.
556 
557 TODO: after merging in ipa-sra, use function call notes instead of the
558 args_to_skip bitmap interface.
559 */
560 cgraph_node *
561 cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
562 vec<ipa_replace_map *, va_gc> *tree_map,
563 bitmap args_to_skip, const char * suffix)
564 {
565 tree old_decl = decl;
566 cgraph_node *new_node = NULL;
567 tree new_decl;
568 size_t len, i;
569 ipa_replace_map *map;
570 char *name;
571
572 gcc_checking_assert (local.versionable);
573 gcc_assert (local.can_change_signature || !args_to_skip);
574
575 /* Make a new FUNCTION_DECL tree node */
576 if (!args_to_skip)
577 new_decl = copy_node (old_decl);
578 else
579 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
580
581 /* These pointers represent the function body and will be populated only when
582 the clone is materialized. */
583 gcc_assert (new_decl != old_decl);
584 DECL_STRUCT_FUNCTION (new_decl) = NULL;
585 DECL_ARGUMENTS (new_decl) = NULL;
586 DECL_INITIAL (new_decl) = NULL;
587 DECL_RESULT (new_decl) = NULL;
588 /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
589 sometimes storing only clone decl instead of original. */
590
591 /* Generate a new name for the new version. */
592 len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
593 name = XALLOCAVEC (char, len + strlen (suffix) + 2);
594 memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
595 strcpy (name + len + 1, suffix);
596 name[len] = '.';
597 DECL_NAME (new_decl) = get_identifier (name);
598 SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
599 SET_DECL_RTL (new_decl, NULL);
600
601 new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
602 redirect_callers, false, NULL, args_to_skip);
603
604 /* Update the properties.
605 Make the clone visible only within this translation unit and make
606 sure it is not weak.
607 ??? We cannot use COMDAT linkage because there is no
608 ABI support for this. */
609 set_new_clone_decl_and_node_flags (new_node);
610 new_node->clone.tree_map = tree_map;
611 if (!implicit_section)
612 new_node->set_section (get_section ());
613
614 /* Clones of global symbols or symbols with unique names are unique. */
615 if ((TREE_PUBLIC (old_decl)
616 && !DECL_EXTERNAL (old_decl)
617 && !DECL_WEAK (old_decl)
618 && !DECL_COMDAT (old_decl))
619 || in_lto_p)
620 new_node->unique_name = true;
621 FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
622 new_node->maybe_create_reference (map->new_tree, IPA_REF_ADDR, NULL);
623
624 if (ipa_transforms_to_apply.exists ())
625 new_node->ipa_transforms_to_apply
626 = ipa_transforms_to_apply.copy ();
627
628 symtab->call_cgraph_duplication_hooks (this, new_node);
629
630 return new_node;
631 }
632
633 /* A callgraph node is being removed from the symbol table; see if its entry
634 can be replaced by another inline clone. */
635 cgraph_node *
636 cgraph_node::find_replacement (void)
637 {
638 cgraph_node *next_inline_clone, *replacement;
639
640 for (next_inline_clone = clones;
641 next_inline_clone
642 && next_inline_clone->decl != decl;
643 next_inline_clone = next_inline_clone->next_sibling_clone)
644 ;
645
646 /* If there is an inline clone of the node being removed, we need
647 to put it into the position of the removed node and reorganize all
648 other clones to be based on it. */
649 if (next_inline_clone)
650 {
651 cgraph_node *n;
652 cgraph_node *new_clones;
653
654 replacement = next_inline_clone;
655
656 /* Unlink inline clone from the list of clones of removed node. */
657 if (next_inline_clone->next_sibling_clone)
658 next_inline_clone->next_sibling_clone->prev_sibling_clone
659 = next_inline_clone->prev_sibling_clone;
660 if (next_inline_clone->prev_sibling_clone)
661 {
662 gcc_assert (clones != next_inline_clone);
663 next_inline_clone->prev_sibling_clone->next_sibling_clone
664 = next_inline_clone->next_sibling_clone;
665 }
666 else
667 {
668 gcc_assert (clones == next_inline_clone);
669 clones = next_inline_clone->next_sibling_clone;
670 }
671
672 new_clones = clones;
673 clones = NULL;
674
675 /* Copy clone info. */
676 next_inline_clone->clone = clone;
677
678 /* Now place it into the clone tree at the same level as NODE. */
679 next_inline_clone->clone_of = clone_of;
680 next_inline_clone->prev_sibling_clone = NULL;
681 next_inline_clone->next_sibling_clone = NULL;
682 if (clone_of)
683 {
684 if (clone_of->clones)
685 clone_of->clones->prev_sibling_clone = next_inline_clone;
686 next_inline_clone->next_sibling_clone = clone_of->clones;
687 clone_of->clones = next_inline_clone;
688 }
689
690 /* Merge the clone list. */
691 if (new_clones)
692 {
693 if (!next_inline_clone->clones)
694 next_inline_clone->clones = new_clones;
695 else
696 {
697 n = next_inline_clone->clones;
698 while (n->next_sibling_clone)
699 n = n->next_sibling_clone;
700 n->next_sibling_clone = new_clones;
701 new_clones->prev_sibling_clone = n;
702 }
703 }
704
705 /* Update clone_of pointers. */
706 n = new_clones;
707 while (n)
708 {
709 n->clone_of = next_inline_clone;
710 n = n->next_sibling_clone;
711 }
712 return replacement;
713 }
714 else
715 return NULL;
716 }
717
718 /* Like cgraph_set_call_stmt but walk the clone tree and update all
719 clones sharing the same function body.
720 When UPDATE_SPECULATIVE is true, all three components of a
721 speculative edge get updated. Otherwise we update only the direct
722 call. */
723
724 void
725 cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
726 gcall *new_stmt,
727 bool update_speculative)
728 {
729 cgraph_node *node;
730 cgraph_edge *edge = get_edge (old_stmt);
731
732 if (edge)
733 edge->set_call_stmt (new_stmt, update_speculative);
734
735 node = clones;
736 if (node)
737 while (node != this)
738 {
739 cgraph_edge *edge = node->get_edge (old_stmt);
740 if (edge)
741 {
742 edge->set_call_stmt (new_stmt, update_speculative);
743 /* If UPDATE_SPECULATIVE is false, it means that we are turning
744 speculative call into a real code sequence. Update the
745 callgraph edges. */
746 if (edge->speculative && !update_speculative)
747 {
748 cgraph_edge *direct, *indirect;
749 ipa_ref *ref;
750
751 gcc_assert (!edge->indirect_unknown_callee);
752 edge->speculative_call_info (direct, indirect, ref);
753 direct->speculative = false;
754 indirect->speculative = false;
755 ref->speculative = false;
756 }
757 }
758 if (node->clones)
759 node = node->clones;
760 else if (node->next_sibling_clone)
761 node = node->next_sibling_clone;
762 else
763 {
764 while (node != this && !node->next_sibling_clone)
765 node = node->clone_of;
766 if (node != this)
767 node = node->next_sibling_clone;
768 }
769 }
770 }
771
772 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
773 the same function body. If a clone already has an edge for OLD_STMT, only
774 update the edge, the same way cgraph_set_call_stmt_including_clones does.
775 
776 TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
777 relative frequencies of the clones. */
778
779 void
780 cgraph_node::create_edge_including_clones (cgraph_node *callee,
781 gimple *old_stmt, gcall *stmt,
782 gcov_type count,
783 int freq,
784 cgraph_inline_failed_t reason)
785 {
786 cgraph_node *node;
787 cgraph_edge *edge;
788
789 if (!get_edge (stmt))
790 {
791 edge = create_edge (callee, stmt, count, freq);
792 edge->inline_failed = reason;
793 }
794
795 node = clones;
796 if (node)
797 while (node != this)
798 {
799 cgraph_edge *edge = node->get_edge (old_stmt);
800
801 /* It is possible that clones already contain the edge while the
802 master did not. Either we promoted an indirect call into a direct
803 call in the clone, or we are processing clones of an unreachable
804 master whose edges have been removed. */
805 if (edge)
806 edge->set_call_stmt (stmt);
807 else if (! node->get_edge (stmt))
808 {
809 edge = node->create_edge (callee, stmt, count, freq);
810 edge->inline_failed = reason;
811 }
812
813 if (node->clones)
814 node = node->clones;
815 else if (node->next_sibling_clone)
816 node = node->next_sibling_clone;
817 else
818 {
819 while (node != this && !node->next_sibling_clone)
820 node = node->clone_of;
821 if (node != this)
822 node = node->next_sibling_clone;
823 }
824 }
825 }
826
827 /* Remove the node from the cgraph together with all inline clones inlined
828 into it. Skip, however, the removal of FORBIDDEN_NODE and return true if it
829 needs to be removed. This allows the function to be called from an outer
830 loop walking the clone tree. */
831
832 bool
833 cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
834 {
835 cgraph_edge *e, *next;
836 bool found = false;
837
838 if (this == forbidden_node)
839 {
840 callers->remove ();
841 return true;
842 }
843 for (e = callees; e; e = next)
844 {
845 next = e->next_callee;
846 if (!e->inline_failed)
847 found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
848 }
849 remove ();
850 return found;
851 }
852
853 /* The edges representing the callers of the NEW_VERSION node were
854 fixed by cgraph_function_versioning (); now the call_expr in their
855 respective tree code should be updated to call NEW_VERSION. */
856
857 static void
858 update_call_expr (cgraph_node *new_version)
859 {
860 cgraph_edge *e;
861
862 gcc_assert (new_version);
863
864 /* Update the call expr on the edges to call the new version. */
865 for (e = new_version->callers; e; e = e->next_caller)
866 {
867 function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
868 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
869 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
870 }
871 }
872
873
874 /* Create a new cgraph node which is the new version of
875 this callgraph node. REDIRECT_CALLERS holds the caller
876 edges which should be redirected to point to
877 the new version. All the callee edges of the node
878 are cloned to the new version node. Return the new
879 version node.
880 
881 If non-NULL, BBS_TO_COPY determines which basic blocks
882 are copied, to prevent duplication of calls that are dead
883 in the clone. */
884
885 cgraph_node *
886 cgraph_node::create_version_clone (tree new_decl,
887 vec<cgraph_edge *> redirect_callers,
888 bitmap bbs_to_copy)
889 {
890 cgraph_node *new_version;
891 cgraph_edge *e;
892 unsigned i;
893
894 new_version = cgraph_node::create (new_decl);
895
896 new_version->analyzed = analyzed;
897 new_version->definition = definition;
898 new_version->local = local;
899 new_version->externally_visible = false;
900 new_version->no_reorder = no_reorder;
901 new_version->local.local = new_version->definition;
902 new_version->global = global;
903 new_version->rtl = rtl;
904 new_version->count = count;
905
906 for (e = callees; e; e=e->next_callee)
907 if (!bbs_to_copy
908 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
909 e->clone (new_version, e->call_stmt,
910 e->lto_stmt_uid, REG_BR_PROB_BASE,
911 CGRAPH_FREQ_BASE,
912 true);
913 for (e = indirect_calls; e; e=e->next_callee)
914 if (!bbs_to_copy
915 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
916 e->clone (new_version, e->call_stmt,
917 e->lto_stmt_uid, REG_BR_PROB_BASE,
918 CGRAPH_FREQ_BASE,
919 true);
920 FOR_EACH_VEC_ELT (redirect_callers, i, e)
921 {
922 /* Redirect calls to the old version node to point to its new
923 version. */
924 e->redirect_callee (new_version);
925 }
926
927 symtab->call_cgraph_duplication_hooks (this, new_version);
928
929 return new_version;
930 }
931
932 /* Perform function versioning.
933 Function versioning includes copying of the tree and
934 a callgraph update (creating a new cgraph node and updating
935 its callees and callers).
936
937 The REDIRECT_CALLERS vector contains the edges to be redirected
938 to the new version.
939
940 TREE_MAP is a mapping of tree nodes we want to replace with
941 new ones (according to results of prior analysis).
942
943 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
944 from the new version.
945 If SKIP_RETURN is true, the new version will return void.
946 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
947 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
948
949 Return the new version's cgraph node. */
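
/* A minimal usage sketch (hypothetical caller, not code from this file):

     vec<cgraph_edge *> callers = node->collect_callers ();
     cgraph_node *copy
       = node->create_version_clone_with_body (callers, NULL, NULL, false,
                                               NULL, NULL, "my_suffix");

   copies the whole body, redirects the collected callers to the copy and
   returns its cgraph node; the NULL tree_map, args_to_skip and bbs_to_copy
   mean no replacements, all parameters kept and every block copied, and
   "my_suffix" is just a placeholder clone name.  */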
950
951 cgraph_node *
952 cgraph_node::create_version_clone_with_body
953 (vec<cgraph_edge *> redirect_callers,
954 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
955 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
956 const char *clone_name)
957 {
958 tree old_decl = decl;
959 cgraph_node *new_version_node = NULL;
960 tree new_decl;
961
962 if (!tree_versionable_function_p (old_decl))
963 return NULL;
964
965 gcc_assert (local.can_change_signature || !args_to_skip);
966
967 /* Make a new FUNCTION_DECL tree node for the new version. */
968 if (!args_to_skip && !skip_return)
969 new_decl = copy_node (old_decl);
970 else
971 new_decl
972 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
973
974 /* Generate a new name for the new version. */
975 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
976 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
977 SET_DECL_RTL (new_decl, NULL);
978
979 /* When the old decl was a constructor or destructor, make sure the clone isn't. */
980 DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
981 DECL_STATIC_DESTRUCTOR (new_decl) = 0;
982
983 /* Create the new version's call-graph node
984 and update the edges of the new node. */
985 new_version_node = create_version_clone (new_decl, redirect_callers,
986 bbs_to_copy);
987
988 if (ipa_transforms_to_apply.exists ())
989 new_version_node->ipa_transforms_to_apply
990 = ipa_transforms_to_apply.copy ();
991 /* Copy the function tree of OLD_DECL to the new version. */
992 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
993 skip_return, bbs_to_copy, new_entry_block);
994
995 /* Update the new version's properties.
996 Make the new version visible only within this translation unit and make
997 sure it is not weak.
998 ??? We cannot use COMDAT linkage because there is no
999 ABI support for this. */
1000 new_version_node->make_decl_local ();
1001 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1002 new_version_node->externally_visible = 0;
1003 new_version_node->local.local = 1;
1004 new_version_node->lowered = true;
1005 if (!implicit_section)
1006 new_version_node->set_section (get_section ());
1007 /* Clones of global symbols or symbols with unique names are unique. */
1008 if ((TREE_PUBLIC (old_decl)
1009 && !DECL_EXTERNAL (old_decl)
1010 && !DECL_WEAK (old_decl)
1011 && !DECL_COMDAT (old_decl))
1012 || in_lto_p)
1013 new_version_node->unique_name = true;
1014
1015 /* Update the call_expr on the edges to call the new version node. */
1016 update_call_expr (new_version_node);
1017
1018 symtab->call_cgraph_insertion_hooks (this);
1019 return new_version_node;
1020 }
1021
1022 /* Given a virtual clone, turn it into an actual clone. */
1023
1024 static void
1025 cgraph_materialize_clone (cgraph_node *node)
1026 {
1027 bitmap_obstack_initialize (NULL);
1028 node->former_clone_of = node->clone_of->decl;
1029 if (node->clone_of->former_clone_of)
1030 node->former_clone_of = node->clone_of->former_clone_of;
1031 /* Copy the function tree of the clone's origin to the clone. */
1032 tree_function_versioning (node->clone_of->decl, node->decl,
1033 node->clone.tree_map, true,
1034 node->clone.args_to_skip, false,
1035 NULL, NULL);
1036 if (symtab->dump_file)
1037 {
1038 dump_function_to_file (node->clone_of->decl, symtab->dump_file,
1039 dump_flags);
1040 dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
1041 }
1042
1043 /* The function is no longer a clone. */
1044 if (node->next_sibling_clone)
1045 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1046 if (node->prev_sibling_clone)
1047 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1048 else
1049 node->clone_of->clones = node->next_sibling_clone;
1050 node->next_sibling_clone = NULL;
1051 node->prev_sibling_clone = NULL;
1052 if (!node->clone_of->analyzed && !node->clone_of->clones)
1053 {
1054 node->clone_of->release_body ();
1055 node->clone_of->remove_callees ();
1056 node->clone_of->remove_all_references ();
1057 }
1058 node->clone_of = NULL;
1059 bitmap_obstack_release (NULL);
1060 }
1061
1062 /* Once all functions from the compilation unit are in memory, produce all
1063 clones and update all calls. We might also do this on demand if we do not
1064 want to bring all functions to memory prior to compilation, but the current
1065 WHOPR implementation does that and it is a bit easier to keep everything
1066 right in this order. */
1067
1068 void
1069 symbol_table::materialize_all_clones (void)
1070 {
1071 cgraph_node *node;
1072 bool stabilized = false;
1073
1074
1075 if (symtab->dump_file)
1076 fprintf (symtab->dump_file, "Materializing clones\n");
1077
1078 cgraph_node::checking_verify_cgraph_nodes ();
1079
1080 /* We could also use topological order, but the number of iterations should
1081 be bounded by the number of IPA passes, since a single IPA pass is probably
1082 not going to create clones of clones it created itself. */
1083 while (!stabilized)
1084 {
1085 stabilized = true;
1086 FOR_EACH_FUNCTION (node)
1087 {
1088 if (node->clone_of && node->decl != node->clone_of->decl
1089 && !gimple_has_body_p (node->decl))
1090 {
1091 if (!node->clone_of->clone_of)
1092 node->clone_of->get_untransformed_body ();
1093 if (gimple_has_body_p (node->clone_of->decl))
1094 {
1095 if (symtab->dump_file)
1096 {
1097 fprintf (symtab->dump_file, "cloning %s to %s\n",
1098 xstrdup_for_dump (node->clone_of->name ()),
1099 xstrdup_for_dump (node->name ()));
1100 if (node->clone.tree_map)
1101 {
1102 unsigned int i;
1103 fprintf (symtab->dump_file, " replace map: ");
1104 for (i = 0;
1105 i < vec_safe_length (node->clone.tree_map);
1106 i++)
1107 {
1108 ipa_replace_map *replace_info;
1109 replace_info = (*node->clone.tree_map)[i];
1110 print_generic_expr (symtab->dump_file, replace_info->old_tree, 0);
1111 fprintf (symtab->dump_file, " -> ");
1112 print_generic_expr (symtab->dump_file, replace_info->new_tree, 0);
1113 fprintf (symtab->dump_file, "%s%s;",
1114 replace_info->replace_p ? "(replace)":"",
1115 replace_info->ref_p ? "(ref)":"");
1116 }
1117 fprintf (symtab->dump_file, "\n");
1118 }
1119 if (node->clone.args_to_skip)
1120 {
1121 fprintf (symtab->dump_file, " args_to_skip: ");
1122 dump_bitmap (symtab->dump_file,
1123 node->clone.args_to_skip);
1124 }
1125 if (node->clone.combined_args_to_skip)
1126 {
1127 fprintf (symtab->dump_file, " combined_args_to_skip:");
1128 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip);
1129 }
1130 }
1131 cgraph_materialize_clone (node);
1132 stabilized = false;
1133 }
1134 }
1135 }
1136 }
1137 FOR_EACH_FUNCTION (node)
1138 if (!node->analyzed && node->callees)
1139 {
1140 node->remove_callees ();
1141 node->remove_all_references ();
1142 }
1143 else
1144 node->clear_stmts_in_references ();
1145 if (symtab->dump_file)
1146 fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
1147
1148 cgraph_node::checking_verify_cgraph_nodes ();
1149
1150 symtab->remove_unreachable_nodes (symtab->dump_file);
1151 }
1152
1153 #include "gt-cgraphclones.h"