1 /* Callgraph clones
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module provides facilities for cloning functions, i.e. creating
22    new functions based on existing functions with simple modifications,
23    such as replacement of parameters.
24 
25    To allow whole-program optimization without the actual presence of
26    function bodies, an additional infrastructure is provided for so-called
27    virtual clones.
28 
29 A virtual clone in the callgraph is a function that has no
30 associated body, just a description of how to create its body based
31 on a different function (which itself may be a virtual clone).
32
33 The description of function modifications includes adjustments to
34 the function's signature (which allows, for example, removing or
35 adding function arguments), substitutions to perform on the
36 function body, and, for inlined functions, a pointer to the
37 function that it will be inlined into.
38
39 It is also possible to redirect any edge of the callgraph from a
40 function to its virtual clone. This implies updating of the call
41 site to adjust for the new function signature.
42
43 Most of the transformations performed by inter-procedural
44 optimizations can be represented via virtual clones. For
45 instance, a constant propagation pass can produce a virtual clone
46 of the function which replaces one of its arguments by a
47 constant. The inliner can represent its decisions by producing a
48 clone of a function whose body will be later integrated into
49 a given function.
50
51    Using virtual clones, the program can be easily updated
52    during the Execute stage, solving most of the pass interaction
53    problems that would otherwise occur during Transform.
54
55    Virtual clones are later materialized in the LTRANS stage and
56    turned into real functions.  Passes executed after the virtual
57    clone was introduced also perform their Transform stage
58    on the new functions, so for a pass there is no significant
59    difference between operating on a real function or a virtual
60    clone introduced before its Execute stage.
61
62 Optimization passes then work on virtual clones introduced before
63 their Execute stage as if they were real functions. The
64 only difference is that clones are not visible during the
65 Generate Summary stage. */
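/* As an illustrative (hypothetical) example of the machinery described
   above: an IPA constant propagation pass that discovers that the second
   argument of

       int f (int a, int b) { return a + b; }

   is always 7 can create a virtual clone described roughly as "f with
   argument B replaced by 7 and removed from the signature", and redirect
   the interesting call edges to it.  Only at materialization time is a
   body equivalent to

       int f.constprop (int a) { return a + 7; }

   actually built, by tree_function_versioning (see
   cgraph_materialize_clone below).  The ".constprop" spelling is only the
   conventional suffix used by that pass; nothing here depends on it.  */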
66
67 #include "config.h"
68 #include "system.h"
69 #include "coretypes.h"
70 #include "backend.h"
71 #include "target.h"
72 #include "rtl.h"
73 #include "tree.h"
74 #include "gimple.h"
75 #include "stringpool.h"
76 #include "cgraph.h"
77 #include "lto-streamer.h"
78 #include "tree-eh.h"
79 #include "tree-cfg.h"
80 #include "tree-inline.h"
81 #include "tree-dump.h"
82 #include "gimple-pretty-print.h"
83
84 /* Create a clone of this edge in the callgraph, with caller node N and
85    represented by the call statement CALL_STMT.  */
86
87 cgraph_edge *
88 cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
89 gcov_type count_scale, int freq_scale, bool update_original)
90 {
91 cgraph_edge *new_edge;
92 gcov_type gcov_count = apply_probability (count, count_scale);
93 gcov_type freq;
94
95 /* We do not want to ignore loop nest after frequency drops to 0. */
96 if (!freq_scale)
97 freq_scale = 1;
98 freq = frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
99 if (freq > CGRAPH_FREQ_MAX)
100 freq = CGRAPH_FREQ_MAX;
101
102 if (indirect_unknown_callee)
103 {
104 tree decl;
105
106 if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
107 /* When the call is speculative, we need to resolve it
108 via cgraph_resolve_speculation and not here. */
109 && !speculative)
110 {
111 cgraph_node *callee = cgraph_node::get (decl);
112 gcc_checking_assert (callee);
113 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
114 }
115 else
116 {
117 new_edge = n->create_indirect_edge (call_stmt,
118 indirect_info->ecf_flags,
119 count, freq, false);
120 *new_edge->indirect_info = *indirect_info;
121 }
122 }
123 else
124 {
125 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
126 if (indirect_info)
127 {
128 new_edge->indirect_info
129 = ggc_cleared_alloc<cgraph_indirect_call_info> ();
130 *new_edge->indirect_info = *indirect_info;
131 }
132 }
133
134 new_edge->inline_failed = inline_failed;
135 new_edge->indirect_inlining_edge = indirect_inlining_edge;
136 new_edge->lto_stmt_uid = stmt_uid;
137   /* Manually clone flags that depend on call_stmt availability.  */
138 new_edge->can_throw_external = can_throw_external;
139 new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
140 new_edge->speculative = speculative;
141 new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
142 if (update_original)
143 {
144 count -= new_edge->count;
145 if (count < 0)
146 count = 0;
147 }
148 symtab->call_edge_duplication_hooks (this, new_edge);
149 return new_edge;
150 }
151
152 /* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
153 return value if SKIP_RETURN is true. */
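/* A worked example (illustrative): given ORIG_TYPE int (int, double, char)
   and ARGS_TO_SKIP with bit 1 set, the resulting type is int (int, char);
   if SKIP_RETURN is also true, it becomes void (int, char).  Argument
   indices count from 0, and for a METHOD_TYPE index 0 is the THIS pointer,
   in which case a FUNCTION_TYPE is built instead of copying the
   METHOD_TYPE.  */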
154
155 static tree
156 build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
157 bool skip_return)
158 {
159 tree new_type = NULL;
160 tree args, new_args = NULL;
161 tree new_reversed;
162 int i = 0;
163
164 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
165 args = TREE_CHAIN (args), i++)
166 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
167 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
168
169 new_reversed = nreverse (new_args);
170 if (args)
171 {
172 if (new_reversed)
173 TREE_CHAIN (new_args) = void_list_node;
174 else
175 new_reversed = void_list_node;
176 }
177
178   /* Use copy_node to preserve as much as possible from the original type
179      (debug info, attribute lists etc.).
180      The exception is that METHOD_TYPEs must have a THIS argument; when we
181      are asked to remove it, we need to build a new FUNCTION_TYPE
182      instead.  */
183 if (TREE_CODE (orig_type) != METHOD_TYPE
184 || !args_to_skip
185 || !bitmap_bit_p (args_to_skip, 0))
186 {
187 new_type = build_distinct_type_copy (orig_type);
188 TYPE_ARG_TYPES (new_type) = new_reversed;
189 }
190 else
191 {
192 new_type
193 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
194 new_reversed));
195 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
196 }
197
198 if (skip_return)
199 TREE_TYPE (new_type) = void_type_node;
200
201 return new_type;
202 }
203
204 /* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
205 return value if SKIP_RETURN is true.
206
207 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
208 linked by TREE_CHAIN directly. The caller is responsible for eliminating
209 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
210
211 static tree
212 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
213 bool skip_return)
214 {
215 tree new_decl = copy_node (orig_decl);
216 tree new_type;
217
218 new_type = TREE_TYPE (orig_decl);
219 if (prototype_p (new_type)
220 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
221 new_type
222 = build_function_type_skip_args (new_type, args_to_skip, skip_return);
223 TREE_TYPE (new_decl) = new_type;
224
225 /* For declarations setting DECL_VINDEX (i.e. methods)
226 we expect first argument to be THIS pointer. */
227 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
228 DECL_VINDEX (new_decl) = NULL_TREE;
229
230 /* When signature changes, we need to clear builtin info. */
231 if (DECL_BUILT_IN (new_decl)
232 && args_to_skip
233 && !bitmap_empty_p (args_to_skip))
234 {
235 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
236 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
237 }
238 /* The FE might have information and assumptions about the other
239 arguments. */
240 DECL_LANG_SPECIFIC (new_decl) = NULL;
241 return new_decl;
242 }
243
244 /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private
245 clone or its thunk. */
246
247 static void
248 set_new_clone_decl_and_node_flags (cgraph_node *new_node)
249 {
250 DECL_EXTERNAL (new_node->decl) = 0;
251 TREE_PUBLIC (new_node->decl) = 0;
252 DECL_COMDAT (new_node->decl) = 0;
253 DECL_WEAK (new_node->decl) = 0;
254 DECL_VIRTUAL_P (new_node->decl) = 0;
255 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
256 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
257
258 new_node->externally_visible = 0;
259 new_node->local.local = 1;
260 new_node->lowered = true;
261 }
262
263 /* Duplicate thunk THUNK if necessary but make it refer to NODE.  NODE's
264    clone.args_to_skip, if non-NULL, determines which parameters are omitted.
265    The function can return NODE if no thunk is necessary, which can happen
266    when the thunk is this_adjusting but we are removing the this parameter.  */
267
268 static cgraph_node *
269 duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
270 {
271 cgraph_node *new_thunk, *thunk_of;
272 thunk_of = thunk->callees->callee->ultimate_alias_target ();
273
274 if (thunk_of->thunk.thunk_p)
275 node = duplicate_thunk_for_node (thunk_of, node);
276
277 if (!DECL_ARGUMENTS (thunk->decl))
278 thunk->get_untransformed_body ();
279
280 cgraph_edge *cs;
281 for (cs = node->callers; cs; cs = cs->next_caller)
282 if (cs->caller->thunk.thunk_p
283 && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
284 && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
285 && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
286 && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
287 return cs->caller;
288
289 tree new_decl;
290 if (!node->clone.args_to_skip)
291 new_decl = copy_node (thunk->decl);
292 else
293 {
294 /* We do not need to duplicate this_adjusting thunks if we have removed
295 this. */
296 if (thunk->thunk.this_adjusting
297 && bitmap_bit_p (node->clone.args_to_skip, 0))
298 return node;
299
300 new_decl = build_function_decl_skip_args (thunk->decl,
301 node->clone.args_to_skip,
302 false);
303 }
304
305 tree *link = &DECL_ARGUMENTS (new_decl);
306 int i = 0;
307 for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
308 {
309 if (!node->clone.args_to_skip
310 || !bitmap_bit_p (node->clone.args_to_skip, i))
311 {
312 tree nd = copy_node (pd);
313 DECL_CONTEXT (nd) = new_decl;
314 *link = nd;
315 link = &DECL_CHAIN (nd);
316 }
317 }
318 *link = NULL_TREE;
319
320 gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
321 gcc_checking_assert (!DECL_INITIAL (new_decl));
322 gcc_checking_assert (!DECL_RESULT (new_decl));
323 gcc_checking_assert (!DECL_RTL_SET_P (new_decl));
324
325 DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
326 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
327
328 new_thunk = cgraph_node::create (new_decl);
329 set_new_clone_decl_and_node_flags (new_thunk);
330 new_thunk->definition = true;
331 new_thunk->local.can_change_signature = node->local.can_change_signature;
332 new_thunk->thunk = thunk->thunk;
333 new_thunk->unique_name = in_lto_p;
334 new_thunk->former_clone_of = thunk->decl;
335 new_thunk->clone.args_to_skip = node->clone.args_to_skip;
336 new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
337
338 cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
339 CGRAPH_FREQ_BASE);
340 symtab->call_edge_duplication_hooks (thunk->callees, e);
341 symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
342 return new_thunk;
343 }
344
345 /* If E does not lead to a thunk, simply redirect it to N. Otherwise create
346 one or more equivalent thunks for N and redirect E to the first in the
347 chain. Note that it is then necessary to call
348 n->expand_all_artificial_thunks once all callers are redirected. */
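/* The expected calling pattern, sketched after the way create_clone below
   uses it: redirect every interesting caller first, then expand whatever
   artificial thunks were created:

       cgraph_edge *e;
       unsigned i;
       FOR_EACH_VEC_ELT (redirect_callers, i, e)
         e->redirect_callee_duplicating_thunks (new_node);
       new_node->expand_all_artificial_thunks ();
*/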
349
350 void
351 cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
352 {
353 cgraph_node *orig_to = callee->ultimate_alias_target ();
354 if (orig_to->thunk.thunk_p)
355 n = duplicate_thunk_for_node (orig_to, n);
356
357 redirect_callee (n);
358 }
359
360 /* Call expand_thunk on all callers that are thunks and analyze those nodes
361    that were expanded.  */
362
363 void
364 cgraph_node::expand_all_artificial_thunks ()
365 {
366 cgraph_edge *e;
367 for (e = callers; e;)
368 if (e->caller->thunk.thunk_p)
369 {
370 cgraph_node *thunk = e->caller;
371
372 e = e->next_caller;
373 if (thunk->expand_thunk (false, false))
374 {
375 thunk->thunk.thunk_p = false;
376 thunk->analyze ();
377 }
378 thunk->expand_all_artificial_thunks ();
379 }
380 else
381 e = e->next_caller;
382 }
383
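/* Record in the ipa-clones dump file (if it is open) that CLONE was created
   from ORIGINAL with SUFFIX, and remember both nodes as participating in
   cloning.  */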
384 void
385 dump_callgraph_transformation (const cgraph_node *original,
386 const cgraph_node *clone,
387 const char *suffix)
388 {
389 if (symtab->ipa_clones_dump_file)
390 {
391 fprintf (symtab->ipa_clones_dump_file,
392 "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
393 original->asm_name (), original->order,
394 DECL_SOURCE_FILE (original->decl),
395 DECL_SOURCE_LINE (original->decl),
396 DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
397 clone->order, DECL_SOURCE_FILE (clone->decl),
398 DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
399 suffix);
400
401 symtab->cloned_nodes.add (original);
402 symtab->cloned_nodes.add (clone);
403 }
404 }
405
406 /* Create a node representing a clone of this node, executed GCOV_COUNT
407    times.  Decrease the execution counts of the original node too.
408    The new clone will have its decl set to NEW_DECL, which may or may not
409    be the same as the decl of this node.
410 
411    When UPDATE_ORIGINAL is true, the counts are subtracted from the original
412    function's profile to reflect the fact that part of the execution is
413    handled by the new clone.
414    When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
415    the new clone.  Otherwise the caller is responsible for doing so later.
416 
417    If the new node is being inlined into another one, NEW_INLINED_TO should
418    be the function the new one is (even indirectly) inlined into.  All hooks
419    will see this in the node's global.inlined_to when invoked.  Can be NULL
420    if the node is not inlined.  */
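/* A hypothetical call (illustrative only; real callers live in the inliner
   and the IPA passes) might look like:

       cgraph_node *copy
         = node->create_clone (node->decl, e->count, e->frequency,
                               true,         // update_original
                               vNULL,        // redirect_callers
                               false,        // call_duplication_hook
                               inlined_to,   // new_inlined_to
                               NULL,         // args_to_skip
                               "inline_copy");

   Here E, INLINED_TO and the "inline_copy" suffix are made up for the
   example.  */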
421
422 cgraph_node *
423 cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
424 bool update_original,
425 vec<cgraph_edge *> redirect_callers,
426 bool call_duplication_hook,
427 cgraph_node *new_inlined_to,
428 bitmap args_to_skip, const char *suffix)
429 {
430 cgraph_node *new_node = symtab->create_empty ();
431 cgraph_edge *e;
432 gcov_type count_scale;
433 unsigned i;
434
435 if (new_inlined_to)
436 dump_callgraph_transformation (this, new_inlined_to, "inlining to");
437
438 new_node->decl = new_decl;
439 new_node->register_symbol ();
440 new_node->origin = origin;
441 new_node->lto_file_data = lto_file_data;
442 if (new_node->origin)
443 {
444 new_node->next_nested = new_node->origin->nested;
445 new_node->origin->nested = new_node;
446 }
447 new_node->analyzed = analyzed;
448 new_node->definition = definition;
449 new_node->local = local;
450 new_node->externally_visible = false;
451 new_node->no_reorder = no_reorder;
452 new_node->local.local = true;
453 new_node->global = global;
454 new_node->global.inlined_to = new_inlined_to;
455 new_node->rtl = rtl;
456 new_node->count = count;
457 new_node->frequency = frequency;
458 new_node->tp_first_run = tp_first_run;
459 new_node->tm_clone = tm_clone;
460 new_node->icf_merged = icf_merged;
461 new_node->merged_comdat = merged_comdat;
462 new_node->thunk = thunk;
463
464 new_node->clone.tree_map = NULL;
465 new_node->clone.args_to_skip = args_to_skip;
466 new_node->split_part = split_part;
467 if (!args_to_skip)
468 new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
469 else if (clone.combined_args_to_skip)
470 {
471 new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
472 bitmap_ior (new_node->clone.combined_args_to_skip,
473 clone.combined_args_to_skip, args_to_skip);
474 }
475 else
476 new_node->clone.combined_args_to_skip = args_to_skip;
477
478 if (count)
479 {
480 if (new_node->count > count)
481 count_scale = REG_BR_PROB_BASE;
482 else
483 count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
484 }
485 else
486 count_scale = 0;
487 if (update_original)
488 {
489 count -= gcov_count;
490 if (count < 0)
491 count = 0;
492 }
493
494 FOR_EACH_VEC_ELT (redirect_callers, i, e)
495 {
496 /* Redirect calls to the old version node to point to its new
497 version. The only exception is when the edge was proved to
498 	 be unreachable during the cloning procedure.  */
499 if (!e->callee
500 || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
501 || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
502 e->redirect_callee_duplicating_thunks (new_node);
503 }
504 new_node->expand_all_artificial_thunks ();
505
506 for (e = callees;e; e=e->next_callee)
507 e->clone (new_node, e->call_stmt, e->lto_stmt_uid, count_scale,
508 freq, update_original);
509
510 for (e = indirect_calls; e; e = e->next_callee)
511 e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
512 count_scale, freq, update_original);
513 new_node->clone_references (this);
514
515 new_node->next_sibling_clone = clones;
516 if (clones)
517 clones->prev_sibling_clone = new_node;
518 clones = new_node;
519 new_node->clone_of = this;
520
521 if (call_duplication_hook)
522 symtab->call_cgraph_duplication_hooks (this, new_node);
523
524 if (!new_inlined_to)
525 dump_callgraph_transformation (this, new_node, suffix);
526
527 return new_node;
528 }
529
530 static GTY(()) unsigned int clone_fn_id_num;
531
532 /* Return a new assembler name for a clone with SUFFIX of a decl named
533 NAME. */
534
535 tree
536 clone_function_name_1 (const char *name, const char *suffix)
537 {
538 size_t len = strlen (name);
539 char *tmp_name, *prefix;
540
541 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
542 memcpy (prefix, name, len);
543 strcpy (prefix + len + 1, suffix);
544 prefix[len] = symbol_table::symbol_suffix_separator ();
545 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
546 return get_identifier (tmp_name);
547 }
548
549 /* Return a new assembler name for a clone of DECL with SUFFIX. */
550
551 tree
552 clone_function_name (tree decl, const char *suffix)
553 {
554 tree name = DECL_ASSEMBLER_NAME (decl);
555 return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
556 }
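/* For instance (illustrative), for a decl whose assembler name is "foo" and
   SUFFIX "constprop", this produces an identifier along the lines of
   "foo.constprop.3": the separator comes from
   symbol_table::symbol_suffix_separator, and the trailing number from
   clone_fn_id_num via ASM_FORMAT_PRIVATE_NAME, so the exact spelling is
   target-dependent.  */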
557
558
559 /* Create a callgraph node clone with a new declaration.  The actual body
560    will be copied later, at the compilation stage.
561 
562    TODO: after merging in ipa-sra, use function call notes instead of the
563    args_to_skip bitmap interface.
564 */
565 cgraph_node *
566 cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
567 vec<ipa_replace_map *, va_gc> *tree_map,
568 bitmap args_to_skip, const char * suffix)
569 {
570 tree old_decl = decl;
571 cgraph_node *new_node = NULL;
572 tree new_decl;
573 size_t len, i;
574 ipa_replace_map *map;
575 char *name;
576
577 gcc_checking_assert (local.versionable);
578 gcc_assert (local.can_change_signature || !args_to_skip);
579
580 /* Make a new FUNCTION_DECL tree node */
581 if (!args_to_skip)
582 new_decl = copy_node (old_decl);
583 else
584 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
585
586 /* These pointers represent function body and will be populated only when clone
587 is materialized. */
588 gcc_assert (new_decl != old_decl);
589 DECL_STRUCT_FUNCTION (new_decl) = NULL;
590 DECL_ARGUMENTS (new_decl) = NULL;
591 DECL_INITIAL (new_decl) = NULL;
592 DECL_RESULT (new_decl) = NULL;
593 /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
594 sometimes storing only clone decl instead of original. */
595
596 /* Generate a new name for the new version. */
597 len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
598 name = XALLOCAVEC (char, len + strlen (suffix) + 2);
599 memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
600 strcpy (name + len + 1, suffix);
601 name[len] = '.';
602 DECL_NAME (new_decl) = get_identifier (name);
603 SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
604 SET_DECL_RTL (new_decl, NULL);
605
606 new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
607 redirect_callers, false, NULL, args_to_skip, suffix);
608
609   /* Update the properties.
610      Make the clone visible only within this translation unit.  Make sure
611      it is not weak either.
612 ??? We cannot use COMDAT linkage because there is no
613 ABI support for this. */
614 set_new_clone_decl_and_node_flags (new_node);
615 new_node->clone.tree_map = tree_map;
616 if (!implicit_section)
617 new_node->set_section (get_section ());
618
619 /* Clones of global symbols or symbols with unique names are unique. */
620 if ((TREE_PUBLIC (old_decl)
621 && !DECL_EXTERNAL (old_decl)
622 && !DECL_WEAK (old_decl)
623 && !DECL_COMDAT (old_decl))
624 || in_lto_p)
625 new_node->unique_name = true;
626 FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
627 new_node->maybe_create_reference (map->new_tree, NULL);
628
629 if (ipa_transforms_to_apply.exists ())
630 new_node->ipa_transforms_to_apply
631 = ipa_transforms_to_apply.copy ();
632
633 symtab->call_cgraph_duplication_hooks (this, new_node);
634
635 return new_node;
636 }
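/* A simplified, hypothetical sketch of how an IPA pass could request a
   virtual clone with one value replaced by a constant (field names follow
   the dump code in materialize_all_clones below; a real ipa_replace_map
   may carry more information):

       ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
       map->old_tree = parm;	// hypothetical tree being replaced
       map->new_tree = build_int_cst (integer_type_node, 7);
       map->replace_p = true;
       map->ref_p = false;

       vec<ipa_replace_map *, va_gc> *tree_map = NULL;
       vec_safe_push (tree_map, map);

       cgraph_node *clone
         = node->create_virtual_clone (node->collect_callers (), tree_map,
                                       NULL, "constprop");
*/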
637
638 /* A callgraph node is being removed from the symbol table; see if its entry
639    can be replaced by another inline clone.  */
640 cgraph_node *
641 cgraph_node::find_replacement (void)
642 {
643 cgraph_node *next_inline_clone, *replacement;
644
645 for (next_inline_clone = clones;
646 next_inline_clone
647 && next_inline_clone->decl != decl;
648 next_inline_clone = next_inline_clone->next_sibling_clone)
649 ;
650
651   /* If there is an inline clone of the node being removed, we need
652      to put it into the position of the removed node and reorganize all
653      other clones to be based on it.  */
654 if (next_inline_clone)
655 {
656 cgraph_node *n;
657 cgraph_node *new_clones;
658
659 replacement = next_inline_clone;
660
661 /* Unlink inline clone from the list of clones of removed node. */
662 if (next_inline_clone->next_sibling_clone)
663 next_inline_clone->next_sibling_clone->prev_sibling_clone
664 = next_inline_clone->prev_sibling_clone;
665 if (next_inline_clone->prev_sibling_clone)
666 {
667 gcc_assert (clones != next_inline_clone);
668 next_inline_clone->prev_sibling_clone->next_sibling_clone
669 = next_inline_clone->next_sibling_clone;
670 }
671 else
672 {
673 gcc_assert (clones == next_inline_clone);
674 clones = next_inline_clone->next_sibling_clone;
675 }
676
677 new_clones = clones;
678 clones = NULL;
679
680 /* Copy clone info. */
681 next_inline_clone->clone = clone;
682
683       /* Now place it into the clone tree at the same level as NODE.  */
684 next_inline_clone->clone_of = clone_of;
685 next_inline_clone->prev_sibling_clone = NULL;
686 next_inline_clone->next_sibling_clone = NULL;
687 if (clone_of)
688 {
689 if (clone_of->clones)
690 clone_of->clones->prev_sibling_clone = next_inline_clone;
691 next_inline_clone->next_sibling_clone = clone_of->clones;
692 clone_of->clones = next_inline_clone;
693 }
694
695 /* Merge the clone list. */
696 if (new_clones)
697 {
698 if (!next_inline_clone->clones)
699 next_inline_clone->clones = new_clones;
700 else
701 {
702 n = next_inline_clone->clones;
703 while (n->next_sibling_clone)
704 n = n->next_sibling_clone;
705 n->next_sibling_clone = new_clones;
706 new_clones->prev_sibling_clone = n;
707 }
708 }
709
710 /* Update clone_of pointers. */
711 n = new_clones;
712 while (n)
713 {
714 n->clone_of = next_inline_clone;
715 n = n->next_sibling_clone;
716 }
717 return replacement;
718 }
719 else
720 return NULL;
721 }
722
723 /* Like cgraph_set_call_stmt but walk the clone tree and update all
724    clones sharing the same function body.
725    When UPDATE_SPECULATIVE is true, all three components of a
726    speculative edge get updated.  Otherwise we update only the direct
727    call.  */
728
729 void
730 cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
731 gcall *new_stmt,
732 bool update_speculative)
733 {
734 cgraph_node *node;
735 cgraph_edge *edge = get_edge (old_stmt);
736
737 if (edge)
738 edge->set_call_stmt (new_stmt, update_speculative);
739
740 node = clones;
741 if (node)
742 while (node != this)
743 {
744 cgraph_edge *edge = node->get_edge (old_stmt);
745 if (edge)
746 {
747 edge->set_call_stmt (new_stmt, update_speculative);
748 /* If UPDATE_SPECULATIVE is false, it means that we are turning
749 speculative call into a real code sequence. Update the
750 callgraph edges. */
751 if (edge->speculative && !update_speculative)
752 {
753 cgraph_edge *direct, *indirect;
754 ipa_ref *ref;
755
756 gcc_assert (!edge->indirect_unknown_callee);
757 edge->speculative_call_info (direct, indirect, ref);
758 direct->speculative = false;
759 indirect->speculative = false;
760 ref->speculative = false;
761 }
762 }
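	  /* Pick the next node in a preorder walk of the clone tree:
	     descend into clones first, otherwise move to the next sibling,
	     and otherwise climb back up through clone_of until a sibling is
	     found or we are back at this node.  */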
763 if (node->clones)
764 node = node->clones;
765 else if (node->next_sibling_clone)
766 node = node->next_sibling_clone;
767 else
768 {
769 while (node != this && !node->next_sibling_clone)
770 node = node->clone_of;
771 if (node != this)
772 node = node->next_sibling_clone;
773 }
774 }
775 }
776
777 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
778    the same function body.  If clones already have an edge for OLD_STMT, only
779    update the edge the same way cgraph_set_call_stmt_including_clones does.
780 
781    TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
782    frequencies of the clones.  */
783
784 void
785 cgraph_node::create_edge_including_clones (cgraph_node *callee,
786 gimple *old_stmt, gcall *stmt,
787 gcov_type count,
788 int freq,
789 cgraph_inline_failed_t reason)
790 {
791 cgraph_node *node;
792 cgraph_edge *edge;
793
794 if (!get_edge (stmt))
795 {
796 edge = create_edge (callee, stmt, count, freq);
797 edge->inline_failed = reason;
798 }
799
800 node = clones;
801 if (node)
802 while (node != this)
803 /* Thunk clones do not get updated while copying inline function body. */
804 if (!node->thunk.thunk_p)
805 {
806 cgraph_edge *edge = node->get_edge (old_stmt);
807
808 	  /* It is possible that clones already contain the edge while the
809 	     master didn't.  Either we promoted an indirect call into a direct
810 	     call in the clone, or we are processing clones of an unreachable
811 	     master where the edges have been removed.  */
812 if (edge)
813 edge->set_call_stmt (stmt);
814 else if (! node->get_edge (stmt))
815 {
816 edge = node->create_edge (callee, stmt, count, freq);
817 edge->inline_failed = reason;
818 }
819
820 if (node->clones)
821 node = node->clones;
822 else if (node->next_sibling_clone)
823 node = node->next_sibling_clone;
824 else
825 {
826 while (node != this && !node->next_sibling_clone)
827 node = node->clone_of;
828 if (node != this)
829 node = node->next_sibling_clone;
830 }
831 }
832 }
833
834 /* Remove the node from the cgraph and all inline clones inlined into it.
835    However, skip removal of FORBIDDEN_NODE and return true if it needs to be
836    removed.  This allows the function to be called from an outer loop walking
837    the clone tree.  */
838
839 bool
840 cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
841 {
842 cgraph_edge *e, *next;
843 bool found = false;
844
845 if (this == forbidden_node)
846 {
847 callers->remove ();
848 return true;
849 }
850 for (e = callees; e; e = next)
851 {
852 next = e->next_callee;
853 if (!e->inline_failed)
854 found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
855 }
856 remove ();
857 return found;
858 }
859
860 /* The edges representing the callers of the NEW_VERSION node were
861    fixed by cgraph_function_versioning (); now the call_expr in their
862    respective tree code should be updated to call NEW_VERSION.  */
863
864 static void
865 update_call_expr (cgraph_node *new_version)
866 {
867 cgraph_edge *e;
868
869 gcc_assert (new_version);
870
871 /* Update the call expr on the edges to call the new version. */
872 for (e = new_version->callers; e; e = e->next_caller)
873 {
874 function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
875 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
876 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
877 }
878 }
879
880
881 /* Create a new cgraph node which is a new version of this
882    callgraph node.  REDIRECT_CALLERS holds the caller
883    edges which should be redirected to point to
884    NEW_VERSION.  All the callee edges of the node
885    are cloned to the new version node.  Return the new
886    version node.
887 
888    If non-NULL, BBS_TO_COPY determines which basic blocks
889    are copied, to prevent duplication of calls that are dead
890    in the clone.  */
891
892 cgraph_node *
893 cgraph_node::create_version_clone (tree new_decl,
894 vec<cgraph_edge *> redirect_callers,
895 bitmap bbs_to_copy,
896 const char *suffix)
897 {
898 cgraph_node *new_version;
899 cgraph_edge *e;
900 unsigned i;
901
902 new_version = cgraph_node::create (new_decl);
903
904 new_version->analyzed = analyzed;
905 new_version->definition = definition;
906 new_version->local = local;
907 new_version->externally_visible = false;
908 new_version->no_reorder = no_reorder;
909 new_version->local.local = new_version->definition;
910 new_version->global = global;
911 new_version->rtl = rtl;
912 new_version->count = count;
913
914 for (e = callees; e; e=e->next_callee)
915 if (!bbs_to_copy
916 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
917 e->clone (new_version, e->call_stmt,
918 e->lto_stmt_uid, REG_BR_PROB_BASE,
919 CGRAPH_FREQ_BASE,
920 true);
921 for (e = indirect_calls; e; e=e->next_callee)
922 if (!bbs_to_copy
923 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
924 e->clone (new_version, e->call_stmt,
925 e->lto_stmt_uid, REG_BR_PROB_BASE,
926 CGRAPH_FREQ_BASE,
927 true);
928 FOR_EACH_VEC_ELT (redirect_callers, i, e)
929 {
930 /* Redirect calls to the old version node to point to its new
931 version. */
932 e->redirect_callee (new_version);
933 }
934
935 symtab->call_cgraph_duplication_hooks (this, new_version);
936
937 dump_callgraph_transformation (this, new_version, suffix);
938
939 return new_version;
940 }
941
942 /* Perform function versioning.
943    Function versioning includes copying of the tree and
944    a callgraph update (creating a new cgraph node and updating
945    its callees and callers).
946 
947    REDIRECT_CALLERS is a vector of the edges to be redirected
948    to the new version.
949 
950    TREE_MAP is a mapping of tree nodes we want to replace with
951    new ones (according to results of prior analysis).
952 
953    If non-NULL, ARGS_TO_SKIP determines which function parameters to
954    remove from the new version.
955    If SKIP_RETURN is true, the new version will return void.
956    If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
957    If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
958 
959    Return the new version's cgraph node.  */
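/* A minimal (hypothetical) use, creating a body-carrying copy of a function
   with all of its current callers redirected to it and nothing else
   changed:

       cgraph_node *copy
         = node->create_version_clone_with_body (node->collect_callers (),
                                                 NULL, NULL, false, NULL,
                                                 NULL, "isolate");

   The "isolate" suffix is only an example; TREE_MAP, ARGS_TO_SKIP,
   BBS_TO_COPY and NEW_ENTRY_BLOCK are all left empty here.  */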
960
961 cgraph_node *
962 cgraph_node::create_version_clone_with_body
963 (vec<cgraph_edge *> redirect_callers,
964 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
965 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
966 const char *suffix)
967 {
968 tree old_decl = decl;
969 cgraph_node *new_version_node = NULL;
970 tree new_decl;
971
972 if (!tree_versionable_function_p (old_decl))
973 return NULL;
974
975 gcc_assert (local.can_change_signature || !args_to_skip);
976
977 /* Make a new FUNCTION_DECL tree node for the new version. */
978 if (!args_to_skip && !skip_return)
979 new_decl = copy_node (old_decl);
980 else
981 new_decl
982 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
983
984 /* Generate a new name for the new version. */
985 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
986 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
987 SET_DECL_RTL (new_decl, NULL);
988
989 /* When the old decl was a con-/destructor make sure the clone isn't. */
990 DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
991 DECL_STATIC_DESTRUCTOR (new_decl) = 0;
992
993   /* Create the new version's call-graph node
994      and update the edges of the new node.  */
995 new_version_node = create_version_clone (new_decl, redirect_callers,
996 bbs_to_copy, suffix);
997
998 if (ipa_transforms_to_apply.exists ())
999 new_version_node->ipa_transforms_to_apply
1000 = ipa_transforms_to_apply.copy ();
1001 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1002 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
1003 skip_return, bbs_to_copy, new_entry_block);
1004
1005   /* Update the new version's properties.
1006      Make the new version visible only within this translation unit.  Make
1007      sure it is not weak either.
1008 ??? We cannot use COMDAT linkage because there is no
1009 ABI support for this. */
1010 new_version_node->make_decl_local ();
1011 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1012 new_version_node->externally_visible = 0;
1013 new_version_node->local.local = 1;
1014 new_version_node->lowered = true;
1015 if (!implicit_section)
1016 new_version_node->set_section (get_section ());
1017 /* Clones of global symbols or symbols with unique names are unique. */
1018 if ((TREE_PUBLIC (old_decl)
1019 && !DECL_EXTERNAL (old_decl)
1020 && !DECL_WEAK (old_decl)
1021 && !DECL_COMDAT (old_decl))
1022 || in_lto_p)
1023 new_version_node->unique_name = true;
1024
1025 /* Update the call_expr on the edges to call the new version node. */
1026 update_call_expr (new_version_node);
1027
1028 symtab->call_cgraph_insertion_hooks (this);
1029 return new_version_node;
1030 }
1031
1032 /* Given a virtual clone, turn it into an actual clone.  */
1033
1034 static void
1035 cgraph_materialize_clone (cgraph_node *node)
1036 {
1037 bitmap_obstack_initialize (NULL);
1038 node->former_clone_of = node->clone_of->decl;
1039 if (node->clone_of->former_clone_of)
1040 node->former_clone_of = node->clone_of->former_clone_of;
1041 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1042 tree_function_versioning (node->clone_of->decl, node->decl,
1043 node->clone.tree_map, true,
1044 node->clone.args_to_skip, false,
1045 NULL, NULL);
1046 if (symtab->dump_file)
1047 {
1048 dump_function_to_file (node->clone_of->decl, symtab->dump_file,
1049 dump_flags);
1050 dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
1051 }
1052
1053   /* The function is no longer a clone.  */
1054 if (node->next_sibling_clone)
1055 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1056 if (node->prev_sibling_clone)
1057 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1058 else
1059 node->clone_of->clones = node->next_sibling_clone;
1060 node->next_sibling_clone = NULL;
1061 node->prev_sibling_clone = NULL;
1062 if (!node->clone_of->analyzed && !node->clone_of->clones)
1063 {
1064 node->clone_of->release_body ();
1065 node->clone_of->remove_callees ();
1066 node->clone_of->remove_all_references ();
1067 }
1068 node->clone_of = NULL;
1069 bitmap_obstack_release (NULL);
1070 }
1071
1072 /* Once all functions from the compilation unit are in memory, produce all
1073    clones and update all calls.  We might also do this on demand if we don't
1074    want to bring all functions into memory prior to compilation, but the
1075    current WHOPR implementation does that and it is a bit easier to keep
1076    everything right in this order.  */
1077
1078 void
1079 symbol_table::materialize_all_clones (void)
1080 {
1081 cgraph_node *node;
1082 bool stabilized = false;
1083
1084
1085 if (symtab->dump_file)
1086 fprintf (symtab->dump_file, "Materializing clones\n");
1087
1088 cgraph_node::checking_verify_cgraph_nodes ();
1089
1090   /* We could also use topological order, but the number of iterations should
1091      be bounded by the number of IPA passes, since a single IPA pass is
1092      probably not going to create clones of clones it created itself.  */
1093 while (!stabilized)
1094 {
1095 stabilized = true;
1096 FOR_EACH_FUNCTION (node)
1097 {
1098 if (node->clone_of && node->decl != node->clone_of->decl
1099 && !gimple_has_body_p (node->decl))
1100 {
1101 if (!node->clone_of->clone_of)
1102 node->clone_of->get_untransformed_body ();
1103 if (gimple_has_body_p (node->clone_of->decl))
1104 {
1105 if (symtab->dump_file)
1106 {
1107 fprintf (symtab->dump_file, "cloning %s to %s\n",
1108 xstrdup_for_dump (node->clone_of->name ()),
1109 xstrdup_for_dump (node->name ()));
1110 if (node->clone.tree_map)
1111 {
1112 unsigned int i;
1113 fprintf (symtab->dump_file, " replace map: ");
1114 for (i = 0;
1115 i < vec_safe_length (node->clone.tree_map);
1116 i++)
1117 {
1118 ipa_replace_map *replace_info;
1119 replace_info = (*node->clone.tree_map)[i];
1120 print_generic_expr (symtab->dump_file, replace_info->old_tree, 0);
1121 fprintf (symtab->dump_file, " -> ");
1122 print_generic_expr (symtab->dump_file, replace_info->new_tree, 0);
1123 fprintf (symtab->dump_file, "%s%s;",
1124 replace_info->replace_p ? "(replace)":"",
1125 replace_info->ref_p ? "(ref)":"");
1126 }
1127 fprintf (symtab->dump_file, "\n");
1128 }
1129 if (node->clone.args_to_skip)
1130 {
1131 fprintf (symtab->dump_file, " args_to_skip: ");
1132 dump_bitmap (symtab->dump_file,
1133 node->clone.args_to_skip);
1134 }
1136 		  if (node->clone.combined_args_to_skip)
1136 {
1137 fprintf (symtab->dump_file, " combined_args_to_skip:");
1138 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip);
1139 }
1140 }
1141 cgraph_materialize_clone (node);
1142 stabilized = false;
1143 }
1144 }
1145 }
1146 }
1147 FOR_EACH_FUNCTION (node)
1148 if (!node->analyzed && node->callees)
1149 {
1150 node->remove_callees ();
1151 node->remove_all_references ();
1152 }
1153 else
1154 node->clear_stmts_in_references ();
1155 if (symtab->dump_file)
1156 fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
1157
1158 cgraph_node::checking_verify_cgraph_nodes ();
1159
1160 symtab->remove_unreachable_nodes (symtab->dump_file);
1161 }
1162
1163 #include "gt-cgraphclones.h"