1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
24
25 The main purpose of this file is to act as an interface between the
26 tree-based front-ends and the back-end (and middle-end).
27
28 The front-end is supposed to use the following functionality (a short usage sketch follows the list):
29
30 - cgraph_finalize_function
31
32 This function is called once the front-end has parsed the whole body of a function
33 and it is certain that neither the function body nor the declaration will change.
34
35 (There is one exception, needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_variable
38
39 This function has the same behavior as the above but is used for static
40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once the compilation unit is finalized and will
45 no longer change.
46
47 In unit-at-a-time mode the call-graph construction and local function
48 analysis take place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be the compilation
52 unit as defined by the language - for instance the C front-end allows multiple
53 compilation units to be parsed at once and it should call this function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In unit-at-a-time compilation the interprocedural analysis takes
59 place here. In particular, static functions whose address is never
60 taken are marked as local. The back-end can then use this information to
61 modify calling conventions, do better inlining, or perform similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at a given stage
68 of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When a function or variable is referenced in some hidden way (for instance
74 via assembly code and marked by the attribute "used"), the call-graph data
75 structure must be updated accordingly by these functions.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
81 callgraph and varpool nodes referenced by the expression as needed.
82
83 ??? On tree-ssa, genericizing should take place here and we would avoid the
84 need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
88 This function is used to expand a function and pass it to the RTL back-end.
89 The front-end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92 cgraph_varpool_finalize_function and cgraph_optimize can cause arbitrary
93 previously finalized functions to be expanded.
94
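As a rough usage sketch (illustrative only; "parsed_functions" below is a
hypothetical stand-in for whatever list the front-end keeps, not an existing
GCC symbol), a front-end driving this interface might do:

    tree fndecl;

    for (fndecl = parsed_functions; fndecl; fndecl = TREE_CHAIN (fndecl))
      cgraph_finalize_function (fndecl, false);

    cgraph_finalize_compilation_unit ();
    cgraph_optimize ();
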
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode the analysis of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too
104 and variables found to be needed are output to the assembly file. Via
105 the mark_referenced call in assemble_variable, functions referenced by
106 static variables are noticed too.
107
108 The inter-procedural information is produced and its existence is
109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change a function from !reachable to reachable and thus
111 assemble_variable no longer calls mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that have not been completely inlined and are not external are output.
115
116 ??? It is possible that a reference to a function or variable is optimized
117 out. We cannot deal with this nicely because the topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize the code so variables are output last and only if they
122 really have been referenced by the produced code, so we catch more cases
123 where a reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory being used, but
129 it is still needed for some legacy code that relies on a particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early via a call to
135 cgraph_assemble_pending_functions from cgraph_finalize_function. The
136 decision on whether a function is needed is made more conservatively, so
137 uninlinable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and we
139 rely entirely on assemble_variable to mark them. */
140
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_analyze_function (struct cgraph_node *node);
175
176 /* Records tree nodes seen in record_reference. Simply using
177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
179 record_reference itself. */
180 static struct pointer_set_t *visited_nodes;
181
182 static FILE *cgraph_dump_file;
183
184 /* Determine if function DECL is needed. That is, visible to something
185 either outside this translation unit, something magic in the system
186 configury, or (if not doing unit-at-a-time) to something we haven't
187 seen yet. */
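
/* By way of example (an illustrative sketch only, not code from any source
   tree), in a translation unit containing:

       int main (void) { return 0; }
       static void helper (void) __attribute__ ((used));
       static void helper (void) { }

   "main" is needed because it is the public main function and "helper" is
   needed because of the "used" attribute, while an uncalled static function
   with no such attribute is not considered needed here and may later be
   discarded as unreachable.  */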
188
189 static bool
190 decide_is_function_needed (struct cgraph_node *node, tree decl)
191 {
192 tree origin;
193 if (MAIN_NAME_P (DECL_NAME (decl))
194 && TREE_PUBLIC (decl))
195 {
196 node->local.externally_visible = true;
197 return true;
198 }
199
200 /* If the user told us it is used, then it must be so. */
201 if (node->local.externally_visible
202 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
203 return true;
204
205 /* ??? If the assembler name is set by hand, it is possible to assemble
206 the name later after finalizing the function and the fact is noticed
207 in assemble_name then. This is arguably a bug. */
208 if (DECL_ASSEMBLER_NAME_SET_P (decl)
209 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
210 return true;
211
212 /* If we decided it was needed before, but at the time we didn't have
213 the body of the function available, then it's still needed. We have
214 to go back and re-check its dependencies now. */
215 if (node->needed)
216 return true;
217
218 /* Externally visible functions must be output. The exception is
219 COMDAT functions that must be output only when they are needed. */
220 if ((TREE_PUBLIC (decl) && !flag_whole_program)
221 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
222 return true;
223
224 /* Constructors and destructors are reachable from the runtime by
225 some mechanism. */
226 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
227 return true;
228
229 if (flag_unit_at_a_time)
230 return false;
231
232 /* If not doing unit at a time, then we'll only defer this function
233 if it's marked for inlining. Otherwise we want to emit it now. */
234
235 /* "extern inline" functions are never output locally. */
236 if (DECL_EXTERNAL (decl))
237 return false;
238 /* Nested functions of an extern inline function shall not be emitted unless
239 we inlined the origin. */
240 for (origin = decl_function_context (decl); origin;
241 origin = decl_function_context (origin))
242 if (DECL_EXTERNAL (origin))
243 return false;
244 /* We want to emit COMDAT functions only when absolutely necessary. */
245 if (DECL_COMDAT (decl))
246 return false;
247 if (!DECL_INLINE (decl)
248 || (!node->local.disregard_inline_limits
249 /* When declared inline, defer even the uninlinable functions.
250 This allows them to be eliminated when unused. */
251 && !DECL_DECLARED_INLINE_P (decl)
252 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
253 return true;
254
255 return false;
256 }
257
258 /* Walk the decls we marked as necessary and see if they reference new
259 variables or functions and add them into the worklists. */
260 static bool
261 cgraph_varpool_analyze_pending_decls (void)
262 {
263 bool changed = false;
264 timevar_push (TV_CGRAPH);
265
266 while (cgraph_varpool_first_unanalyzed_node)
267 {
268 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
269
270 cgraph_varpool_first_unanalyzed_node->analyzed = true;
271
272 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
273
274 if (DECL_INITIAL (decl))
275 {
276 visited_nodes = pointer_set_create ();
277 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
278 pointer_set_destroy (visited_nodes);
279 visited_nodes = NULL;
280 }
281 changed = true;
282 }
283 timevar_pop (TV_CGRAPH);
284 return changed;
285 }
286
287 /* Optimization of function bodies might've rendered some variables
288 unnecessary, so we want to avoid compiling them.
289
290 This is done by pruning the queue and keeping only the variables that
291 really appear needed (i.e. they are either externally visible or referenced
292 by a compiled function). Re-doing the reachability analysis on variables
293 brings back the remaining variables referenced by these. */
294 static void
295 cgraph_varpool_remove_unreferenced_decls (void)
296 {
297 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
298
299 cgraph_varpool_reset_queue ();
300
301 if (errorcount || sorrycount)
302 return;
303
304 while (node)
305 {
306 tree decl = node->decl;
307 next = node->next_needed;
308 node->needed = 0;
309
310 if (node->finalized
311 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
312 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
313 || node->force_output
314 || decide_is_variable_needed (node, decl)
315 /* ??? Cgraph does not yet rule the world with an iron hand,
316 and does not control the emission of debug information.
317 After a variable has its DECL_RTL set, we must assume that
318 it may be referenced by the debug information, and we can
319 no longer elide it. */
320 || DECL_RTL_SET_P (decl)))
321 cgraph_varpool_mark_needed_node (node);
322
323 node = next;
324 }
325 /* Make sure we mark alias targets as used targets. */
326 finish_aliases_1 ();
327 cgraph_varpool_analyze_pending_decls ();
328 }
329
330
331 /* When not doing unit-at-a-time, output all functions enqueued.
332 Return true when such functions were found. */
333
334 bool
335 cgraph_assemble_pending_functions (void)
336 {
337 bool output = false;
338
339 if (flag_unit_at_a_time)
340 return false;
341
342 while (cgraph_nodes_queue)
343 {
344 struct cgraph_node *n = cgraph_nodes_queue;
345
346 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
347 n->next_needed = NULL;
348 if (!n->global.inlined_to
349 && !n->alias
350 && !DECL_EXTERNAL (n->decl))
351 {
352 cgraph_expand_function (n);
353 output = true;
354 }
355 }
356
357 return output;
358 }
359 /* As a GCC extension we allow redefinition of the function. The
360 semantics when both copies of the body differ is not well defined.
361 We replace the old body with the new body so that in unit-at-a-time mode
362 we always use the new body, while in normal mode we may end up with
363 the old body inlined into some functions and the new body expanded and
364 inlined in others.
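
   For illustration (a hedged sketch of this extension, not part of this
   file's logic), a translation unit may contain:

       extern inline int sq (int x) { return x * x; }

       int sq (int x) { return x * x; }

   When the second definition is finalized, cgraph_reset_node below resets
   what was recorded for the first body and the new body is used from then on.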
365
366 ??? It may make more sense to use one body for inlining and the other
367 body for expanding the function, but this is difficult to do. */
368
369 static void
370 cgraph_reset_node (struct cgraph_node *node)
371 {
372 /* If node->output is set, then this is a unit-at-a-time compilation
373 and we have already begun whole-unit analysis. This is *not*
374 testing for whether we've already emitted the function. That
375 case can be sort-of legitimately seen with real function
376 redefinition errors. I would argue that the front end should
377 never present us with such a case, but don't enforce that for now. */
378 gcc_assert (!node->output);
379
380 /* Reset our data structures so we can analyze the function again. */
381 memset (&node->local, 0, sizeof (node->local));
382 memset (&node->global, 0, sizeof (node->global));
383 memset (&node->rtl, 0, sizeof (node->rtl));
384 node->analyzed = false;
385 node->local.redefined_extern_inline = true;
386 node->local.finalized = false;
387
388 if (!flag_unit_at_a_time)
389 {
390 struct cgraph_node *n;
391
392 for (n = cgraph_nodes; n; n = n->next)
393 if (n->global.inlined_to == node)
394 cgraph_remove_node (n);
395 }
396
397 cgraph_node_remove_callees (node);
398
399 /* We may need to re-queue the node for assembling in case
400 we have already processed it and ignored it as not needed. */
401 if (node->reachable && !flag_unit_at_a_time)
402 {
403 struct cgraph_node *n;
404
405 for (n = cgraph_nodes_queue; n; n = n->next_needed)
406 if (n == node)
407 break;
408 if (!n)
409 node->reachable = 0;
410 }
411 }
412
413 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
414 logic in effect. If NESTED is true, then our caller cannot stand to have
415 the garbage collector run at the moment. We would need to either create
416 a new GC context, or just not compile right now. */
417
418 void
419 cgraph_finalize_function (tree decl, bool nested)
420 {
421 struct cgraph_node *node = cgraph_node (decl);
422
423 if (node->local.finalized)
424 cgraph_reset_node (node);
425
426 notice_global_symbol (decl);
427 node->decl = decl;
428 node->local.finalized = true;
429 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
430 if (node->nested)
431 lower_nested_functions (decl);
432 gcc_assert (!node->nested);
433
434 /* If not unit at a time, then we need to create the call graph
435 now, so that called functions can be queued and emitted now. */
436 if (!flag_unit_at_a_time)
437 {
438 cgraph_analyze_function (node);
439 cgraph_decide_inlining_incrementally (node, false);
440 }
441
442 if (decide_is_function_needed (node, decl))
443 cgraph_mark_needed_node (node);
444
445 /* Since we reclaim unreachable nodes at the end of every language
446 level unit, we need to be conservative about possible entry points
447 there. */
448 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
449 cgraph_mark_reachable_node (node);
450
451 /* If not unit at a time, go ahead and emit everything we've found
452 to be reachable at this time. */
453 if (!nested)
454 {
455 if (!cgraph_assemble_pending_functions ())
456 ggc_collect ();
457 }
458
459 /* If we've not yet emitted decl, tell the debug info about it. */
460 if (!TREE_ASM_WRITTEN (decl))
461 (*debug_hooks->deferred_inline_function) (decl);
462
463 /* Possibly warn about unused parameters. */
464 if (warn_unused_parameter)
465 do_warn_unused_parameter (decl);
466 }
467
468 void
469 cgraph_lower_function (struct cgraph_node *node)
470 {
471 if (node->lowered)
472 return;
473 tree_lowering_passes (node->decl);
474 node->lowered = true;
475 }
476
477 /* Walk the tree and record all references. Called via walk_tree. */
478 static tree
479 record_reference (tree *tp, int *walk_subtrees, void *data)
480 {
481 tree t = *tp;
482
483 switch (TREE_CODE (t))
484 {
485 case VAR_DECL:
486 /* ??? Really, we should mark this decl as *potentially* referenced
487 by this function and re-examine whether the decl is actually used
488 after rtl has been generated. */
489 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
490 {
491 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
492 if (lang_hooks.callgraph.analyze_expr)
493 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
494 data);
495 }
496 break;
497
498 case FDESC_EXPR:
499 case ADDR_EXPR:
500 if (flag_unit_at_a_time)
501 {
502 /* Record dereferences to the functions. This makes the
503 functions reachable unconditionally. */
504 tree decl = TREE_OPERAND (*tp, 0);
505 if (TREE_CODE (decl) == FUNCTION_DECL)
506 cgraph_mark_needed_node (cgraph_node (decl));
507 }
508 break;
509
510 default:
511 /* Save some cycles by not walking types and declarations as we
512 won't find anything useful there anyway. */
513 if (IS_TYPE_OR_DECL_P (*tp))
514 {
515 *walk_subtrees = 0;
516 break;
517 }
518
519 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
520 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
521 break;
522 }
523
524 return NULL;
525 }
526
527 /* Create cgraph edges for function calls inside BODY from NODE. */
528
529 static void
530 cgraph_create_edges (struct cgraph_node *node, tree body)
531 {
532 basic_block bb;
533
534 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
535 block_stmt_iterator bsi;
536 tree step;
537 visited_nodes = pointer_set_create ();
538
539 /* Reach the trees by walking over the CFG, and note the
540 enclosing basic-blocks in the call edges. */
541 FOR_EACH_BB_FN (bb, this_cfun)
542 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
543 {
544 tree stmt = bsi_stmt (bsi);
545 tree call = get_call_expr_in (stmt);
546 tree decl;
547
548 if (call && (decl = get_callee_fndecl (call)))
549 {
550 cgraph_create_edge (node, cgraph_node (decl), stmt,
551 bb->count,
552 bb->loop_depth);
553 walk_tree (&TREE_OPERAND (call, 1),
554 record_reference, node, visited_nodes);
555 if (TREE_CODE (stmt) == MODIFY_EXPR)
556 walk_tree (&TREE_OPERAND (stmt, 0),
557 record_reference, node, visited_nodes);
558 }
559 else
560 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
561 }
562
563 /* Look for initializers of constant variables and private statics. */
564 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
565 step;
566 step = TREE_CHAIN (step))
567 {
568 tree decl = TREE_VALUE (step);
569 if (TREE_CODE (decl) == VAR_DECL
570 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
571 && flag_unit_at_a_time)
572 cgraph_varpool_finalize_decl (decl);
573 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
574 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
575 }
576
577 pointer_set_destroy (visited_nodes);
578 visited_nodes = NULL;
579 }
580
581 /* Give initial reasons why inlining would fail. These get
582 either NULLified or, usually, overwritten by a more precise reason
583 later. */
584 static void
585 initialize_inline_failed (struct cgraph_node *node)
586 {
587 struct cgraph_edge *e;
588
589 for (e = node->callers; e; e = e->next_caller)
590 {
591 gcc_assert (!e->callee->global.inlined_to);
592 gcc_assert (e->inline_failed);
593 if (node->local.redefined_extern_inline)
594 e->inline_failed = N_("redefined extern inline functions are not "
595 "considered for inlining");
596 else if (!node->local.inlinable)
597 e->inline_failed = N_("function not inlinable");
598 else
599 e->inline_failed = N_("function not considered for inlining");
600 }
601 }
602
603 /* Rebuild call edges for the current function after passes that are not
604 aware of cgraph updating. */
605 static void
606 rebuild_cgraph_edges (void)
607 {
608 basic_block bb;
609 struct cgraph_node *node = cgraph_node (current_function_decl);
610 block_stmt_iterator bsi;
611
612 cgraph_node_remove_callees (node);
613
614 node->count = ENTRY_BLOCK_PTR->count;
615
616 FOR_EACH_BB (bb)
617 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
618 {
619 tree stmt = bsi_stmt (bsi);
620 tree call = get_call_expr_in (stmt);
621 tree decl;
622
623 if (call && (decl = get_callee_fndecl (call)))
624 cgraph_create_edge (node, cgraph_node (decl), stmt,
625 bb->count,
626 bb->loop_depth);
627 }
628 initialize_inline_failed (node);
629 gcc_assert (!node->global.inlined_to);
630 }
631
632 struct tree_opt_pass pass_rebuild_cgraph_edges =
633 {
634 NULL, /* name */
635 NULL, /* gate */
636 rebuild_cgraph_edges, /* execute */
637 NULL, /* sub */
638 NULL, /* next */
639 0, /* static_pass_number */
640 0, /* tv_id */
641 PROP_cfg, /* properties_required */
642 0, /* properties_provided */
643 0, /* properties_destroyed */
644 0, /* todo_flags_start */
645 0, /* todo_flags_finish */
646 0 /* letter */
647 };
648
649 /* Verify consistency of cgraph node NODE. */
650 void
651 verify_cgraph_node (struct cgraph_node *node)
652 {
653 struct cgraph_edge *e;
654 struct cgraph_node *main_clone;
655 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
656 basic_block this_block;
657 block_stmt_iterator bsi;
658 bool error_found = false;
659
660 timevar_push (TV_CGRAPH_VERIFY);
661 for (e = node->callees; e; e = e->next_callee)
662 if (e->aux)
663 {
664 error ("aux field set for edge %s->%s",
665 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
666 error_found = true;
667 }
668 if (node->count < 0)
669 {
670 error ("execution count is negative");
671 error_found = true;
672 }
673 for (e = node->callers; e; e = e->next_caller)
674 {
675 if (e->count < 0)
676 {
677 error ("caller edge count is negative");
678 error_found = true;
679 }
680 if (!e->inline_failed)
681 {
682 if (node->global.inlined_to
683 != (e->caller->global.inlined_to
684 ? e->caller->global.inlined_to : e->caller))
685 {
686 error ("inlined_to pointer is wrong");
687 error_found = true;
688 }
689 if (node->callers->next_caller)
690 {
691 error ("multiple inline callers");
692 error_found = true;
693 }
694 }
695 else
696 if (node->global.inlined_to)
697 {
698 error ("inlined_to pointer set for noninline callers");
699 error_found = true;
700 }
701 }
702 if (!node->callers && node->global.inlined_to)
703 {
704 error ("inlined_to pointer is set but no predecessors found");
705 error_found = true;
706 }
707 if (node->global.inlined_to == node)
708 {
709 error ("inlined_to pointer refers to itself");
710 error_found = true;
711 }
712
713 for (main_clone = cgraph_node (node->decl); main_clone;
714 main_clone = main_clone->next_clone)
715 if (main_clone == node)
716 break;
717 if (!main_clone)
718 {
719 error ("node not found in DECL_ASSEMBLER_NAME hash");
720 error_found = true;
721 }
722
723 if (node->analyzed
724 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
725 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
726 {
727 if (this_cfun->cfg)
728 {
729 /* The nodes we're interested in are never shared, so walk
730 the tree ignoring duplicates. */
731 visited_nodes = pointer_set_create ();
732 /* Reach the trees by walking over the CFG, and note the
733 enclosing basic-blocks in the call edges. */
734 FOR_EACH_BB_FN (this_block, this_cfun)
735 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
736 {
737 tree stmt = bsi_stmt (bsi);
738 tree call = get_call_expr_in (stmt);
739 tree decl;
740 if (call && (decl = get_callee_fndecl (call)))
741 {
742 struct cgraph_edge *e = cgraph_edge (node, stmt);
743 if (e)
744 {
745 if (e->aux)
746 {
747 error ("shared call_stmt:");
748 debug_generic_stmt (stmt);
749 error_found = true;
750 }
751 if (e->callee->decl != cgraph_node (decl)->decl
752 && e->inline_failed)
753 {
754 error ("edge points to wrong declaration:");
755 debug_tree (e->callee->decl);
756 fprintf (stderr," Instead of:");
757 debug_tree (decl);
758 }
759 e->aux = (void *)1;
760 }
761 else
762 {
763 error ("missing callgraph edge for call stmt:");
764 debug_generic_stmt (stmt);
765 error_found = true;
766 }
767 }
768 }
769 pointer_set_destroy (visited_nodes);
770 visited_nodes = NULL;
771 }
772 else
773 /* No CFG available?! */
774 gcc_unreachable ();
775
776 for (e = node->callees; e; e = e->next_callee)
777 {
778 if (!e->aux)
779 {
780 error ("edge %s->%s has no corresponding call_stmt",
781 cgraph_node_name (e->caller),
782 cgraph_node_name (e->callee));
783 debug_generic_stmt (e->call_stmt);
784 error_found = true;
785 }
786 e->aux = 0;
787 }
788 }
789 if (error_found)
790 {
791 dump_cgraph_node (stderr, node);
792 internal_error ("verify_cgraph_node failed");
793 }
794 timevar_pop (TV_CGRAPH_VERIFY);
795 }
796
797 /* Verify whole cgraph structure. */
798 void
799 verify_cgraph (void)
800 {
801 struct cgraph_node *node;
802
803 if (sorrycount || errorcount)
804 return;
805
806 for (node = cgraph_nodes; node; node = node->next)
807 verify_cgraph_node (node);
808 }
809
810 /* Output one variable, if necessary. Return whether we output it. */
811 static bool
812 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
813 {
814 tree decl = node->decl;
815
816 if (!TREE_ASM_WRITTEN (decl)
817 && !node->alias
818 && !DECL_EXTERNAL (decl)
819 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
820 {
821 assemble_variable (decl, 0, 1, 0);
822 /* Local static variables are never seen by check_global_declarations
823 so we need to output debug info by hand. */
824 if (DECL_CONTEXT (decl)
825 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
826 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
827 && errorcount == 0 && sorrycount == 0)
828 {
829 timevar_push (TV_SYMOUT);
830 (*debug_hooks->global_decl) (decl);
831 timevar_pop (TV_SYMOUT);
832 }
833 return true;
834 }
835
836 return false;
837 }
838
839 /* Output all variables enqueued to be assembled. */
840 bool
841 cgraph_varpool_assemble_pending_decls (void)
842 {
843 bool changed = false;
844
845 if (errorcount || sorrycount)
846 return false;
847
848 /* EH might mark decls as needed during expansion. This should be safe since
849 we don't create references to new functions, but it should not be used
850 elsewhere. */
851 cgraph_varpool_analyze_pending_decls ();
852
853 while (cgraph_varpool_nodes_queue)
854 {
855 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
856
857 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
858 if (cgraph_varpool_assemble_decl (node))
859 changed = true;
860 node->next_needed = NULL;
861 }
862 return changed;
863 }
864
865 /* Output all asm statements we have stored up to be output. */
866
867 static void
868 cgraph_output_pending_asms (void)
869 {
870 struct cgraph_asm_node *can;
871
872 if (errorcount || sorrycount)
873 return;
874
875 for (can = cgraph_asm_nodes; can; can = can->next)
876 assemble_asm (can->asm_str);
877 cgraph_asm_nodes = NULL;
878 }
879
880 /* Analyze the function scheduled to be output. */
881 static void
882 cgraph_analyze_function (struct cgraph_node *node)
883 {
884 tree decl = node->decl;
885
886 current_function_decl = decl;
887 push_cfun (DECL_STRUCT_FUNCTION (decl));
888 cgraph_lower_function (node);
889
890 /* First kill forward declaration so reverse inlining works properly. */
891 cgraph_create_edges (node, decl);
892
893 node->local.inlinable = tree_inlinable_function_p (decl);
894 node->local.self_insns = estimate_num_insns (decl);
895 if (node->local.inlinable)
896 node->local.disregard_inline_limits
897 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
898 initialize_inline_failed (node);
899 if (flag_really_no_inline && !node->local.disregard_inline_limits)
900 node->local.inlinable = 0;
901 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
902 node->global.insns = node->local.self_insns;
903
904 node->analyzed = true;
905 pop_cfun ();
906 current_function_decl = NULL;
907 }
908
909 /* Analyze the whole compilation unit once it is parsed completely. */
910
911 void
912 cgraph_finalize_compilation_unit (void)
913 {
914 struct cgraph_node *node;
915 /* Keep track of already processed nodes when called multiple times for
916 intermodule optimization. */
917 static struct cgraph_node *first_analyzed;
918
919 finish_aliases_1 ();
920
921 if (!flag_unit_at_a_time)
922 {
923 cgraph_output_pending_asms ();
924 cgraph_assemble_pending_functions ();
925 return;
926 }
927
928 if (!quiet_flag)
929 {
930 fprintf (stderr, "\nAnalyzing compilation unit");
931 fflush (stderr);
932 }
933
934 timevar_push (TV_CGRAPH);
935 cgraph_varpool_analyze_pending_decls ();
936 if (cgraph_dump_file)
937 {
938 fprintf (cgraph_dump_file, "Initial entry points:");
939 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
940 if (node->needed && DECL_SAVED_TREE (node->decl))
941 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
942 fprintf (cgraph_dump_file, "\n");
943 }
944
945 /* Propagate reachability flag and lower representation of all reachable
946 functions. In the future, lowering will introduce new functions and
947 new entry points on the way (by template instantiation and virtual
948 method table generation for instance). */
949 while (cgraph_nodes_queue)
950 {
951 struct cgraph_edge *edge;
952 tree decl = cgraph_nodes_queue->decl;
953
954 node = cgraph_nodes_queue;
955 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
956 node->next_needed = NULL;
957
958 /* ??? It is possible to create an extern inline function and later use the
959 weak alias attribute to kill its body. See
960 gcc.c-torture/compile/20011119-1.c */
961 if (!DECL_SAVED_TREE (decl))
962 {
963 cgraph_reset_node (node);
964 continue;
965 }
966
967 gcc_assert (!node->analyzed && node->reachable);
968 gcc_assert (DECL_SAVED_TREE (decl));
969
970 cgraph_analyze_function (node);
971
972 for (edge = node->callees; edge; edge = edge->next_callee)
973 if (!edge->callee->reachable)
974 cgraph_mark_reachable_node (edge->callee);
975
976 cgraph_varpool_analyze_pending_decls ();
977 }
978
979 /* Collect entry points to the unit. */
980
981 if (cgraph_dump_file)
982 {
983 fprintf (cgraph_dump_file, "Unit entry points:");
984 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
985 if (node->needed && DECL_SAVED_TREE (node->decl))
986 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
987 fprintf (cgraph_dump_file, "\n\nInitial ");
988 dump_cgraph (cgraph_dump_file);
989 }
990
991 if (cgraph_dump_file)
992 fprintf (cgraph_dump_file, "\nReclaiming functions:");
993
994 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
995 {
996 tree decl = node->decl;
997
998 if (node->local.finalized && !DECL_SAVED_TREE (decl))
999 cgraph_reset_node (node);
1000
1001 if (!node->reachable && DECL_SAVED_TREE (decl))
1002 {
1003 if (cgraph_dump_file)
1004 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1005 cgraph_remove_node (node);
1006 continue;
1007 }
1008 else
1009 node->next_needed = NULL;
1010 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1011 gcc_assert (node->analyzed == node->local.finalized);
1012 }
1013 if (cgraph_dump_file)
1014 {
1015 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1016 dump_cgraph (cgraph_dump_file);
1017 }
1018 first_analyzed = cgraph_nodes;
1019 ggc_collect ();
1020 timevar_pop (TV_CGRAPH);
1021 }
1022 /* Figure out what functions we want to assemble. */
1023
1024 static void
1025 cgraph_mark_functions_to_output (void)
1026 {
1027 struct cgraph_node *node;
1028
1029 for (node = cgraph_nodes; node; node = node->next)
1030 {
1031 tree decl = node->decl;
1032 struct cgraph_edge *e;
1033
1034 gcc_assert (!node->output);
1035
1036 for (e = node->callers; e; e = e->next_caller)
1037 if (e->inline_failed)
1038 break;
1039
1040 /* We need to output all local functions that are used and not
1041 always inlined, as well as those that are reachable from
1042 outside the current compilation unit. */
1043 if (DECL_SAVED_TREE (decl)
1044 && !node->global.inlined_to
1045 && (node->needed
1046 || (e && node->reachable))
1047 && !TREE_ASM_WRITTEN (decl)
1048 && !DECL_EXTERNAL (decl))
1049 node->output = 1;
1050 else
1051 {
1052 /* We should've reclaimed all functions that are not needed. */
1053 #ifdef ENABLE_CHECKING
1054 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1055 && !DECL_EXTERNAL (decl))
1056 {
1057 dump_cgraph_node (stderr, node);
1058 internal_error ("failed to reclaim unneeded function");
1059 }
1060 #endif
1061 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1062 || DECL_EXTERNAL (decl));
1063
1064 }
1065
1066 }
1067 }
1068
1069 /* Expand function specified by NODE. */
1070
1071 static void
1072 cgraph_expand_function (struct cgraph_node *node)
1073 {
1074 tree decl = node->decl;
1075
1076 /* We ought to not compile any inline clones. */
1077 gcc_assert (!node->global.inlined_to);
1078
1079 if (flag_unit_at_a_time)
1080 announce_function (decl);
1081
1082 cgraph_lower_function (node);
1083
1084 /* Generate RTL for the body of DECL. */
1085 lang_hooks.callgraph.expand_function (decl);
1086
1087 /* Make sure that the back-end didn't give up on compiling. */
1088 /* ??? This can happen with nested functions of extern inline functions. */
1089 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1090
1091 current_function_decl = NULL;
1092 if (!cgraph_preserve_function_body_p (node->decl))
1093 {
1094 DECL_SAVED_TREE (node->decl) = NULL;
1095 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1096 DECL_INITIAL (node->decl) = error_mark_node;
1097 /* Eliminate all call edges. This is important so the call_expr no longer
1098 points to the dead function body. */
1099 cgraph_node_remove_callees (node);
1100 }
1101
1102 cgraph_function_flags_ready = true;
1103 }
1104
1105 /* Return true when the call represented by edge E may be inlined; set *REASON to why inlining failed otherwise. */
1106
1107 bool
1108 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1109 {
1110 *reason = e->inline_failed;
1111 return !e->inline_failed;
1112 }
1113
1114
1115
1116 /* Expand all functions that must be output.
1117
1118 Attempt to topologically sort the nodes so that a function is output when
1119 all functions it calls are already assembled, to allow data to be
1120 propagated across the callgraph. Use a stack to get smaller distance
1121 between a function and its callees (later we may choose to use a more
1122 sophisticated algorithm for function reordering; we will likely want
1123 to use subsections to make the output functions appear in top-down
1124 order). */
1125
1126 static void
1127 cgraph_expand_all_functions (void)
1128 {
1129 struct cgraph_node *node;
1130 struct cgraph_node **order =
1131 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1132 int order_pos = 0, new_order_pos = 0;
1133 int i;
1134
1135 order_pos = cgraph_postorder (order);
1136 gcc_assert (order_pos == cgraph_n_nodes);
1137
1138 /* The garbage collector may remove inline clones we eliminate during
1139 optimization, so we must be sure not to reference them. */
1140 for (i = 0; i < order_pos; i++)
1141 if (order[i]->output)
1142 order[new_order_pos++] = order[i];
1143
1144 for (i = new_order_pos - 1; i >= 0; i--)
1145 {
1146 node = order[i];
1147 if (node->output)
1148 {
1149 gcc_assert (node->reachable);
1150 node->output = 0;
1151 cgraph_expand_function (node);
1152 }
1153 }
1154 free (order);
1155 }
1156
1157 /* This is used to sort the node types by the cgraph order number. */
1158
1159 struct cgraph_order_sort
1160 {
1161 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1162 union
1163 {
1164 struct cgraph_node *f;
1165 struct cgraph_varpool_node *v;
1166 struct cgraph_asm_node *a;
1167 } u;
1168 };
1169
1170 /* Output all functions, variables, and asm statements in the order
1171 according to their order fields, which is the order in which they
1172 appeared in the file. This implements -fno-toplevel-reorder. In
1173 this mode we may output functions and variables which don't really
1174 need to be output. */
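
/* For instance (an illustrative sketch, not code taken from any source tree),
   given the top-level declarations

       int x = 1;
       static int get_x (void) { return x; }
       int y = 2;

   this routine emits x, get_x and y in exactly that order, whereas the normal
   path is free to group all variables together; some legacy code consuming
   the compiler output depends on the source ordering being preserved.  */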
1175
1176 static void
1177 cgraph_output_in_order (void)
1178 {
1179 int max;
1180 size_t size;
1181 struct cgraph_order_sort *nodes;
1182 int i;
1183 struct cgraph_node *pf;
1184 struct cgraph_varpool_node *pv;
1185 struct cgraph_asm_node *pa;
1186
1187 max = cgraph_order;
1188 size = max * sizeof (struct cgraph_order_sort);
1189 nodes = (struct cgraph_order_sort *) alloca (size);
1190 memset (nodes, 0, size);
1191
1192 cgraph_varpool_analyze_pending_decls ();
1193
1194 for (pf = cgraph_nodes; pf; pf = pf->next)
1195 {
1196 if (pf->output)
1197 {
1198 i = pf->order;
1199 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1200 nodes[i].kind = ORDER_FUNCTION;
1201 nodes[i].u.f = pf;
1202 }
1203 }
1204
1205 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1206 {
1207 i = pv->order;
1208 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1209 nodes[i].kind = ORDER_VAR;
1210 nodes[i].u.v = pv;
1211 }
1212
1213 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1214 {
1215 i = pa->order;
1216 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1217 nodes[i].kind = ORDER_ASM;
1218 nodes[i].u.a = pa;
1219 }
1220 cgraph_asm_nodes = NULL;
1221
1222 for (i = 0; i < max; ++i)
1223 {
1224 switch (nodes[i].kind)
1225 {
1226 case ORDER_FUNCTION:
1227 nodes[i].u.f->output = 0;
1228 cgraph_expand_function (nodes[i].u.f);
1229 break;
1230
1231 case ORDER_VAR:
1232 cgraph_varpool_assemble_decl (nodes[i].u.v);
1233 break;
1234
1235 case ORDER_ASM:
1236 assemble_asm (nodes[i].u.a->asm_str);
1237 break;
1238
1239 case ORDER_UNDEFINED:
1240 break;
1241
1242 default:
1243 gcc_unreachable ();
1244 }
1245 }
1246 }
1247
1248 /* Mark visibility of all functions.
1249
1250 A local function is one whose calls can occur only in the current
1251 compilation unit and all its calls are explicit, so we can change
1252 its calling convention. We simply mark all static functions whose
1253 address is not taken as local.
1254
1255 We also clear the TREE_PUBLIC flag of declarations that are public from
1256 the language point of view but whose visibility we want to override for
1257 the back-end's point of view. */
1258
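/* As an example (an illustrative sketch only), given:

       static int helper (int x) { return x + 1; }
       static int taken (int x) { return x - 1; }
       int (*fp) (int) = taken;
       int entry (int x) { return helper (x); }

   "helper" is static and its address is never taken, so it is marked local
   and the back-end may change its calling convention; "taken" has its
   address stored in "fp", so it is needed and not local; "entry" is
   TREE_PUBLIC and stays externally visible.  */
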
1259 static void
1260 cgraph_function_and_variable_visibility (void)
1261 {
1262 struct cgraph_node *node;
1263 struct cgraph_varpool_node *vnode;
1264
1265 for (node = cgraph_nodes; node; node = node->next)
1266 {
1267 if (node->reachable
1268 && (DECL_COMDAT (node->decl)
1269 || (!flag_whole_program
1270 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1271 node->local.externally_visible = true;
1272 if (!node->local.externally_visible && node->analyzed
1273 && !DECL_EXTERNAL (node->decl))
1274 {
1275 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1276 TREE_PUBLIC (node->decl) = 0;
1277 }
1278 node->local.local = (!node->needed
1279 && node->analyzed
1280 && !DECL_EXTERNAL (node->decl)
1281 && !node->local.externally_visible);
1282 }
1283 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1284 {
1285 if (vnode->needed
1286 && !flag_whole_program
1287 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1288 vnode->externally_visible = 1;
1289 if (!vnode->externally_visible)
1290 {
1291 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1292 TREE_PUBLIC (vnode->decl) = 0;
1293 }
1294 gcc_assert (TREE_STATIC (vnode->decl));
1295 }
1296
1297 /* Because we have to be conservative on the boundaries of source
1298 level units, it is possible that we marked some functions as
1299 reachable just because they might be used later via external
1300 linkage, but after making them local they are really unreachable
1301 now. */
1302 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1303
1304 if (cgraph_dump_file)
1305 {
1306 fprintf (cgraph_dump_file, "\nMarking local functions:");
1307 for (node = cgraph_nodes; node; node = node->next)
1308 if (node->local.local)
1309 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1310 fprintf (cgraph_dump_file, "\n\n");
1311 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1312 for (node = cgraph_nodes; node; node = node->next)
1313 if (node->local.externally_visible)
1314 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1315 fprintf (cgraph_dump_file, "\n\n");
1316 }
1317 cgraph_function_flags_ready = true;
1318 }
1319
1320 /* Return true when function body of DECL still needs to be kept around
1321 for later re-use. */
1322 bool
1323 cgraph_preserve_function_body_p (tree decl)
1324 {
1325 struct cgraph_node *node;
1326 if (!cgraph_global_info_ready)
1327 return (DECL_INLINE (decl) && !flag_really_no_inline);
1328 /* Look if there is any clone around. */
1329 for (node = cgraph_node (decl); node; node = node->next_clone)
1330 if (node->global.inlined_to)
1331 return true;
1332 return false;
1333 }
1334
1335 static void
1336 ipa_passes (void)
1337 {
1338 cfun = NULL;
1339 tree_register_cfg_hooks ();
1340 bitmap_obstack_initialize (NULL);
1341 execute_ipa_pass_list (all_ipa_passes);
1342 bitmap_obstack_release (NULL);
1343 }
1344
1345 /* Perform simple optimizations based on callgraph. */
1346
1347 void
1348 cgraph_optimize (void)
1349 {
1350 #ifdef ENABLE_CHECKING
1351 verify_cgraph ();
1352 #endif
1353 if (!flag_unit_at_a_time)
1354 {
1355 cgraph_output_pending_asms ();
1356 cgraph_varpool_assemble_pending_decls ();
1357 return;
1358 }
1359
1360 process_pending_assemble_externals ();
1361
1362 /* The front-end may output common variables after the unit has been finalized.
1363 It is safe to deal with them here as they are always zero initialized. */
1364 cgraph_varpool_analyze_pending_decls ();
1365
1366 timevar_push (TV_CGRAPHOPT);
1367 if (!quiet_flag)
1368 fprintf (stderr, "Performing intraprocedural optimizations\n");
1369
1370 cgraph_function_and_variable_visibility ();
1371 if (cgraph_dump_file)
1372 {
1373 fprintf (cgraph_dump_file, "Marked ");
1374 dump_cgraph (cgraph_dump_file);
1375 }
1376 ipa_passes ();
1377 /* This pass removes bodies of extern inline functions we never inlined.
1378 Do this later so other IPA passes see what is really going on. */
1379 cgraph_remove_unreachable_nodes (false, dump_file);
1380 cgraph_global_info_ready = true;
1381 if (cgraph_dump_file)
1382 {
1383 fprintf (cgraph_dump_file, "Optimized ");
1384 dump_cgraph (cgraph_dump_file);
1385 dump_varpool (cgraph_dump_file);
1386 }
1387 timevar_pop (TV_CGRAPHOPT);
1388
1389 /* Output everything. */
1390 if (!quiet_flag)
1391 fprintf (stderr, "Assembling functions:\n");
1392 #ifdef ENABLE_CHECKING
1393 verify_cgraph ();
1394 #endif
1395
1396 cgraph_mark_functions_to_output ();
1397
1398 if (!flag_toplevel_reorder)
1399 cgraph_output_in_order ();
1400 else
1401 {
1402 cgraph_output_pending_asms ();
1403
1404 cgraph_expand_all_functions ();
1405 cgraph_varpool_remove_unreferenced_decls ();
1406
1407 cgraph_varpool_assemble_pending_decls ();
1408 }
1409
1410 if (cgraph_dump_file)
1411 {
1412 fprintf (cgraph_dump_file, "\nFinal ");
1413 dump_cgraph (cgraph_dump_file);
1414 }
1415 #ifdef ENABLE_CHECKING
1416 verify_cgraph ();
1417 /* Double check that all inline clones are gone and that all
1418 function bodies have been released from memory. */
1419 if (flag_unit_at_a_time
1420 && !dump_enabled_p (TDI_tree_all)
1421 && !(sorrycount || errorcount))
1422 {
1423 struct cgraph_node *node;
1424 bool error_found = false;
1425
1426 for (node = cgraph_nodes; node; node = node->next)
1427 if (node->analyzed
1428 && (node->global.inlined_to
1429 || DECL_SAVED_TREE (node->decl)))
1430 {
1431 error_found = true;
1432 dump_cgraph_node (stderr, node);
1433 }
1434 if (error_found)
1435 internal_error ("nodes with no released memory found");
1436 }
1437 #endif
1438 }
1439
1440 /* Generate and emit a static constructor or destructor. WHICH must be
1441 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1442 GENERIC statements. */
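
/* A hedged usage sketch ("my_runtime_init_decl" is a hypothetical
   FUNCTION_DECL used only for illustration, not something defined in GCC):
   a caller wanting a constructor that runs before main could build a
   one-statement body and pass it in as

       tree body = alloc_stmt_list ();
       append_to_statement_list
         (build_function_call_expr (my_runtime_init_decl, NULL_TREE), &body);
       cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   with 'D' instead of 'I' producing a destructor run at exit.  */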
1443
1444 void
1445 cgraph_build_static_cdtor (char which, tree body, int priority)
1446 {
1447 static int counter = 0;
1448 char which_buf[16];
1449 tree decl, name, resdecl;
1450
1451 sprintf (which_buf, "%c_%d", which, counter++);
1452 name = get_file_function_name_long (which_buf);
1453
1454 decl = build_decl (FUNCTION_DECL, name,
1455 build_function_type (void_type_node, void_list_node));
1456 current_function_decl = decl;
1457
1458 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1459 DECL_ARTIFICIAL (resdecl) = 1;
1460 DECL_IGNORED_P (resdecl) = 1;
1461 DECL_RESULT (decl) = resdecl;
1462
1463 allocate_struct_function (decl);
1464
1465 TREE_STATIC (decl) = 1;
1466 TREE_USED (decl) = 1;
1467 DECL_ARTIFICIAL (decl) = 1;
1468 DECL_IGNORED_P (decl) = 1;
1469 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1470 DECL_SAVED_TREE (decl) = body;
1471 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1472 DECL_UNINLINABLE (decl) = 1;
1473
1474 DECL_INITIAL (decl) = make_node (BLOCK);
1475 TREE_USED (DECL_INITIAL (decl)) = 1;
1476
1477 DECL_SOURCE_LOCATION (decl) = input_location;
1478 cfun->function_end_locus = input_location;
1479
1480 switch (which)
1481 {
1482 case 'I':
1483 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1484 break;
1485 case 'D':
1486 DECL_STATIC_DESTRUCTOR (decl) = 1;
1487 break;
1488 default:
1489 gcc_unreachable ();
1490 }
1491
1492 gimplify_function_tree (decl);
1493
1494 /* ??? We will get called LATE in the compilation process. */
1495 if (cgraph_global_info_ready)
1496 {
1497 tree_lowering_passes (decl);
1498 tree_rest_of_compilation (decl);
1499 }
1500 else
1501 cgraph_finalize_function (decl, 0);
1502
1503 if (targetm.have_ctors_dtors)
1504 {
1505 void (*fn) (rtx, int);
1506
1507 if (which == 'I')
1508 fn = targetm.asm_out.constructor;
1509 else
1510 fn = targetm.asm_out.destructor;
1511 fn (XEXP (DECL_RTL (decl), 0), priority);
1512 }
1513 }
1514
1515 void
1516 init_cgraph (void)
1517 {
1518 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1519 }
1520
1521 /* The edges representing the callers of the NEW_VERSION node were
1522 fixed by cgraph_function_versioning (); now the call_expr in their
1523 respective statements should be updated to call the NEW_VERSION. */
1524
1525 static void
1526 update_call_expr (struct cgraph_node *new_version)
1527 {
1528 struct cgraph_edge *e;
1529
1530 gcc_assert (new_version);
1531 for (e = new_version->callers; e; e = e->next_caller)
1532 /* Update the call expr on the edges
1533 to call the new version. */
1534 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1535 }
1536
1537
1538 /* Create a new cgraph node which is the new version of
1539 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1540 edges which should be redirected to point to
1541 NEW_VERSION. All the callee edges of OLD_VERSION
1542 are cloned to the new version node. Return the new
1543 version node. */
1544
1545 static struct cgraph_node *
1546 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1547 tree new_decl, varray_type redirect_callers)
1548 {
1549 struct cgraph_node *new_version;
1550 struct cgraph_edge *e, *new_e;
1551 struct cgraph_edge *next_callee;
1552 unsigned i;
1553
1554 gcc_assert (old_version);
1555
1556 new_version = cgraph_node (new_decl);
1557
1558 new_version->analyzed = true;
1559 new_version->local = old_version->local;
1560 new_version->global = old_version->global;
1561 new_version->rtl = old_version->rtl;
1562 new_version->reachable = true;
1563 new_version->count = old_version->count;
1564
1565 /* Clone the old node callees. Recursive calls are
1566 also cloned. */
1567 for (e = old_version->callees;e; e=e->next_callee)
1568 {
1569 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1570 new_e->count = e->count;
1571 }
1572 /* Fix recursive calls.
1573 If OLD_VERSION has a recursive call after the
1574 previous edge cloning, the new version will have an edge
1575 pointing to the old version, which is wrong.
1576 Redirect it to point to the new version. */
1577 for (e = new_version->callees ; e; e = next_callee)
1578 {
1579 next_callee = e->next_callee;
1580 if (e->callee == old_version)
1581 cgraph_redirect_edge_callee (e, new_version);
1582
1583 if (!next_callee)
1584 break;
1585 }
1586 if (redirect_callers)
1587 for (i = 0; i < VARRAY_ACTIVE_SIZE (redirect_callers); i++)
1588 {
1589 e = VARRAY_GENERIC_PTR (redirect_callers, i);
1590 /* Redirect calls to the old version node
1591 to point to its new version. */
1592 cgraph_redirect_edge_callee (e, new_version);
1593 }
1594
1595 return new_version;
1596 }
1597
1598 /* Perform function versioning.
1599 Function versioning includes copying of the tree and
1600 a callgraph update (creating a new cgraph node and updating
1601 its callees and callers).
1602
1603 The REDIRECT_CALLERS varray includes the edges to be redirected
1604 to the new version.
1605
1606 TREE_MAP is a mapping of tree nodes we want to replace with
1607 new ones (according to results of prior analysis).
1608 OLD_VERSION_NODE is the node that is versioned.
1609 It returns the new version's cgraph node. */
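
/* A minimal sketch of how a caller might use this (illustrative only; passing
   NULL for both varrays requests a plain copy with no caller redirection and
   no tree replacements):

       struct cgraph_node *clone
         = cgraph_function_versioning (node, NULL, NULL);

   IPA passes that specialize functions are expected to fill REDIRECT_CALLERS
   with the edges that should call the new version and TREE_MAP with the
   replacements computed by their analysis.  */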
1610
1611 struct cgraph_node *
1612 cgraph_function_versioning (struct cgraph_node *old_version_node,
1613 varray_type redirect_callers,
1614 varray_type tree_map)
1615 {
1616 tree old_decl = old_version_node->decl;
1617 struct cgraph_node *new_version_node = NULL;
1618 tree new_decl;
1619
1620 if (!tree_versionable_function_p (old_decl))
1621 return NULL;
1622
1623 /* Make a new FUNCTION_DECL tree node for the
1624 new version. */
1625 new_decl = copy_node (old_decl);
1626
1627 /* Create the new version's call-graph node
1628 and update the edges of the new node. */
1629 new_version_node =
1630 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1631 redirect_callers);
1632
1633 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1634 tree_function_versioning (old_decl, new_decl, tree_map, false);
1635 /* Update the call_expr on the edges to call the new version node. */
1636 update_call_expr (new_version_node);
1637
1638 /* Update the new version's properties.
1639 Make the new version visible only within this translation unit.
1640 ??? We cannot use COMDAT linkage because there is no
1641 ABI support for this. */
1642 DECL_EXTERNAL (new_version_node->decl) = 0;
1643 DECL_ONE_ONLY (new_version_node->decl) = 0;
1644 TREE_PUBLIC (new_version_node->decl) = 0;
1645 DECL_COMDAT (new_version_node->decl) = 0;
1646 new_version_node->local.externally_visible = 0;
1647 new_version_node->local.local = 1;
1648 new_version_node->lowered = true;
1649 return new_version_node;
1650 }
1651
1652 /* Produce a separate function body for inline clones so the offline copy can be
1653 modified without affecting them. */
1654 struct cgraph_node *
1655 save_inline_function_body (struct cgraph_node *node)
1656 {
1657 struct cgraph_node *first_clone;
1658
1659 gcc_assert (node == cgraph_node (node->decl));
1660
1661 cgraph_lower_function (node);
1662
1663 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1664 output to the assembly file. This clone is pointed to by inline_decl of the
1665 original function and the inlining infrastructure knows how to deal with it. */
1666 if (!flag_unit_at_a_time)
1667 {
1668 struct cgraph_edge *e;
1669
1670 first_clone = cgraph_clone_node (node, node->count, 0, false);
1671 first_clone->needed = 0;
1672 first_clone->reachable = 1;
1673 /* Recursively clone all bodies. */
1674 for (e = first_clone->callees; e; e = e->next_callee)
1675 if (!e->inline_failed)
1676 cgraph_clone_inlined_nodes (e, true, false);
1677 }
1678 else
1679 first_clone = node->next_clone;
1680
1681 first_clone->decl = copy_node (node->decl);
1682 node->next_clone = NULL;
1683 if (!flag_unit_at_a_time)
1684 node->inline_decl = first_clone->decl;
1685 first_clone->prev_clone = NULL;
1686 cgraph_insert_node_to_hashtable (first_clone);
1687 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1688
1689 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1690 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1691
1692 DECL_EXTERNAL (first_clone->decl) = 0;
1693 DECL_ONE_ONLY (first_clone->decl) = 0;
1694 TREE_PUBLIC (first_clone->decl) = 0;
1695 DECL_COMDAT (first_clone->decl) = 0;
1696
1697 for (node = first_clone->next_clone; node; node = node->next_clone)
1698 node->decl = first_clone->decl;
1699 #ifdef ENABLE_CHECKING
1700 verify_cgraph_node (first_clone);
1701 #endif
1702 return first_clone;
1703 }
1704