]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
Daily bump.
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
d7c6d889 1/* Callgraph based intraprocedural optimizations.
dfbf3d71 2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
ae01b312 3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
67ce556b 19Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2002110-1301, USA. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
23 few basic intraprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
   This function is called once the front-end has parsed the whole body of
   the function and it is certain that neither the function body nor the
   declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline function.)
36
37 - cgraph_varpool_finalize_variable
38
7bd28bba 39 This function has same behavior as the above but is used for static
b0cdf642 40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once compilation unit is finalized and it will
45 no longer change.
46
47 In the unit-at-a-time the call-graph construction and local function
48 analysis takes place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be compilation
52 unit as defined by the language - for instance C frontend allows multiple
53 compilation units to be parsed at once and it should call function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In this unit-at-a-time compilation the intra procedural analysis takes
59 place here. In particular the static functions whose address is never
60 taken are marked as local. Backend can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at given stage
68 of compilation
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When function or variable is referenced by some hidden way (for instance
74 via assembly code and marked by attribute "used"), the call-graph data structure
75 must be updated accordingly by this function.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
81 callgraph and varpool nodes referenced by the as needed.
82
83 ??? On the tree-ssa genericizing should take place here and we will avoid
84 need for these hooks (replacing them by genericizing hook)
85
86 - expand_function callback
87
88 This function is used to expand function and pass it into RTL back-end.
89 Front-end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92 cgraph_varpool_finalize_function, cgraph_optimize can cause arbitrarily
93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode analyzing of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too
104 and variables found to be needed output to the assembly file. Via
105 mark_referenced call in assemble_variable functions referenced by
106 static variables are noticed too.
107
ca67a72b 108 The intra-procedural information is produced and its existence
b0cdf642 109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change function from !reachable to reachable and thus
111 assemble_variable no longer call mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that has not been completely inlined or are not external are output.
115
116 ??? It is possible that reference to function or variable is optimized
117 out. We can not deal with this nicely because topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize code so variables are output very last and only if they
122 really has been referenced by produced code, so we catch more cases
123 where reference has been optimized out.
124
125 - non-unit-at-a-time
126
   All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used but
129 it is still needed for some legacy code that rely on particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using call of
135 cgraph_assemble_pending_function from cgraph_finalize_function. The
136 decision on whether function is needed is made more conservative so
   uninlinable static functions are needed too.  During the call-graph
   construction the edge destinations are not marked as reachable and it
   is completely relied upon assemble_variable to mark them.  */
121f3051 140
acc70efa 141
ae01b312 142#include "config.h"
143#include "system.h"
144#include "coretypes.h"
145#include "tm.h"
146#include "tree.h"
b5530559 147#include "rtl.h"
acc70efa 148#include "tree-flow.h"
ae01b312 149#include "tree-inline.h"
150#include "langhooks.h"
c6224531 151#include "pointer-set.h"
ae01b312 152#include "toplev.h"
153#include "flags.h"
154#include "ggc.h"
155#include "debug.h"
156#include "target.h"
157#include "cgraph.h"
80a85d8a 158#include "diagnostic.h"
f79b6507 159#include "timevar.h"
d7c6d889 160#include "params.h"
161#include "fibheap.h"
162#include "c-common.h"
611e5405 163#include "intl.h"
b69eb0ff 164#include "function.h"
b5d36404 165#include "ipa-prop.h"
acc70efa 166#include "tree-gimple.h"
f1e2a033 167#include "tree-pass.h"
c1dcd13c 168#include "output.h"
d7c6d889 169
a6868229 170static void cgraph_expand_all_functions (void);
d9d9733a 171static void cgraph_mark_functions_to_output (void);
172static void cgraph_expand_function (struct cgraph_node *);
9bfec7c2 173static tree record_reference (tree *, int *, void *);
d7c6d889 174
9bfec7c2 175/* Records tree nodes seen in record_reference. Simply using
25bb88de 176 walk_tree_without_duplicates doesn't guarantee each node is visited
177 once because it gets a new htab upon each recursive call from
9bfec7c2 178 record_reference itself. */
c6224531 179static struct pointer_set_t *visited_nodes;
25bb88de 180
121f3051 181static FILE *cgraph_dump_file;
182
2c0b522d 183/* Determine if function DECL is needed. That is, visible to something
184 either outside this translation unit, something magic in the system
185 configury, or (if not doing unit-at-a-time) to something we havn't
186 seen yet. */
187
188static bool
189decide_is_function_needed (struct cgraph_node *node, tree decl)
190{
9d95b2b0 191 tree origin;
62eec3b4 192 if (MAIN_NAME_P (DECL_NAME (decl))
193 && TREE_PUBLIC (decl))
194 {
195 node->local.externally_visible = true;
196 return true;
197 }
4ee9c684 198
3f82b628 199 /* If the user told us it is used, then it must be so. */
62eec3b4 200 if (node->local.externally_visible
201 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
3f82b628 202 return true;
203
204 /* ??? If the assembler name is set by hand, it is possible to assemble
205 the name later after finalizing the function and the fact is noticed
206 in assemble_name then. This is arguably a bug. */
207 if (DECL_ASSEMBLER_NAME_SET_P (decl)
208 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
209 return true;
210
2c0b522d 211 /* If we decided it was needed before, but at the time we didn't have
212 the body of the function available, then it's still needed. We have
213 to go back and re-check its dependencies now. */
214 if (node->needed)
215 return true;
216
217 /* Externally visible functions must be output. The exception is
218 COMDAT functions that must be output only when they are needed. */
62eec3b4 219 if ((TREE_PUBLIC (decl) && !flag_whole_program)
220 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 221 return true;
222
223 /* Constructors and destructors are reachable from the runtime by
224 some mechanism. */
225 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
226 return true;
227
2c0b522d 228 if (flag_unit_at_a_time)
229 return false;
230
231 /* If not doing unit at a time, then we'll only defer this function
232 if its marked for inlining. Otherwise we want to emit it now. */
233
234 /* "extern inline" functions are never output locally. */
235 if (DECL_EXTERNAL (decl))
236 return false;
4ee9c684 237 /* Nested functions of extern inline function shall not be emit unless
238 we inlined the origin. */
9d95b2b0 239 for (origin = decl_function_context (decl); origin;
240 origin = decl_function_context (origin))
241 if (DECL_EXTERNAL (origin))
4ee9c684 242 return false;
f024691d 243 /* We want to emit COMDAT functions only when absolutely necessary. */
c08871a9 244 if (DECL_COMDAT (decl))
2c0b522d 245 return false;
246 if (!DECL_INLINE (decl)
247 || (!node->local.disregard_inline_limits
248 /* When declared inline, defer even the uninlinable functions.
e4200070 249 This allows them to be eliminated when unused. */
2c0b522d 250 && !DECL_DECLARED_INLINE_P (decl)
b30512dd 251 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
2c0b522d 252 return true;
253
254 return false;
255}
256
06b27565 257/* Walk the decls we marked as necessary and see if they reference new
258 variables or functions and add them into the worklists. */
c1dcd13c 259static bool
260cgraph_varpool_analyze_pending_decls (void)
261{
262 bool changed = false;
263 timevar_push (TV_CGRAPH);
264
265 while (cgraph_varpool_first_unanalyzed_node)
266 {
267 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
268
269 cgraph_varpool_first_unanalyzed_node->analyzed = true;
270
271 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
272
273 if (DECL_INITIAL (decl))
9bfec7c2 274 {
275 visited_nodes = pointer_set_create ();
276 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
277 pointer_set_destroy (visited_nodes);
278 visited_nodes = NULL;
279 }
c1dcd13c 280 changed = true;
281 }
282 timevar_pop (TV_CGRAPH);
283 return changed;
284}
285
286/* Optimization of function bodies might've rendered some variables as
06b27565 287 unnecessary so we want to avoid these from being compiled.
c1dcd13c 288
442e3cb9 289 This is done by pruning the queue and keeping only the variables that
06b27565 290 really appear needed (ie they are either externally visible or referenced
c1dcd13c 291 by compiled function). Re-doing the reachability analysis on variables
292 brings back the remaining variables referenced by these. */
293static void
294cgraph_varpool_remove_unreferenced_decls (void)
295{
296 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
297
298 cgraph_varpool_reset_queue ();
299
300 if (errorcount || sorrycount)
301 return;
302
303 while (node)
304 {
305 tree decl = node->decl;
306 next = node->next_needed;
307 node->needed = 0;
308
309 if (node->finalized
310 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
311 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
312 || node->force_output
c131e678 313 || decide_is_variable_needed (node, decl)
314 /* ??? Cgraph does not yet rule the world with an iron hand,
315 and does not control the emission of debug information.
316 After a variable has its DECL_RTL set, we must assume that
317 it may be referenced by the debug information, and we can
318 no longer elide it. */
319 || DECL_RTL_SET_P (decl)))
c1dcd13c 320 cgraph_varpool_mark_needed_node (node);
321
322 node = next;
323 }
279cd732 324 /* Make sure we mark alias targets as used targets. */
325 finish_aliases_1 ();
c1dcd13c 326 cgraph_varpool_analyze_pending_decls ();
327}
acc70efa 328
acc70efa 329
c08871a9 330/* When not doing unit-at-a-time, output all functions enqueued.
331 Return true when such a functions were found. */
050e11c9 332
333bool
c08871a9 334cgraph_assemble_pending_functions (void)
335{
336 bool output = false;
337
338 if (flag_unit_at_a_time)
339 return false;
340
341 while (cgraph_nodes_queue)
342 {
343 struct cgraph_node *n = cgraph_nodes_queue;
344
345 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 346 n->next_needed = NULL;
1d09f0e6 347 if (!n->global.inlined_to
348 && !n->alias
349 && !DECL_EXTERNAL (n->decl))
050e11c9 350 {
351 cgraph_expand_function (n);
352 output = true;
353 }
c08871a9 354 }
050e11c9 355
773c5ba7 356 /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
357 the expansion process. Note that this queue may grow as its
358 being processed, as the new functions may generate new ones. */
359 while (cgraph_expand_queue)
360 {
361 struct cgraph_node *n = cgraph_expand_queue;
362 cgraph_expand_queue = cgraph_expand_queue->next_needed;
363 n->next_needed = NULL;
364 cgraph_finalize_function (n->decl, false);
365 output = true;
366 }
367
c08871a9 368 return output;
369}
773c5ba7 370
371
9b8fb23a 372/* As an GCC extension we allow redefinition of the function. The
373 semantics when both copies of bodies differ is not well defined.
374 We replace the old body with new body so in unit at a time mode
375 we always use new body, while in normal mode we may end up with
376 old body inlined into some functions and new body expanded and
377 inlined in others.
378
379 ??? It may make more sense to use one body for inlining and other
380 body for expanding the function but this is difficult to do. */
381
382static void
383cgraph_reset_node (struct cgraph_node *node)
384{
385 /* If node->output is set, then this is a unit-at-a-time compilation
386 and we have already begun whole-unit analysis. This is *not*
387 testing for whether we've already emitted the function. That
388 case can be sort-of legitimately seen with real function
389 redefinition errors. I would argue that the front end should
390 never present us with such a case, but don't enforce that for now. */
391 gcc_assert (!node->output);
392
393 /* Reset our data structures so we can analyze the function again. */
394 memset (&node->local, 0, sizeof (node->local));
395 memset (&node->global, 0, sizeof (node->global));
396 memset (&node->rtl, 0, sizeof (node->rtl));
397 node->analyzed = false;
398 node->local.redefined_extern_inline = true;
399 node->local.finalized = false;
400
401 if (!flag_unit_at_a_time)
402 {
403 struct cgraph_node *n;
404
405 for (n = cgraph_nodes; n; n = n->next)
406 if (n->global.inlined_to == node)
407 cgraph_remove_node (n);
408 }
409
410 cgraph_node_remove_callees (node);
411
412 /* We may need to re-queue the node for assembling in case
413 we already proceeded it and ignored as not needed. */
414 if (node->reachable && !flag_unit_at_a_time)
415 {
416 struct cgraph_node *n;
417
418 for (n = cgraph_nodes_queue; n; n = n->next_needed)
419 if (n == node)
420 break;
421 if (!n)
422 node->reachable = 0;
423 }
424}
c08871a9 425
1e8e9920 426static void
427cgraph_lower_function (struct cgraph_node *node)
428{
429 if (node->lowered)
430 return;
431 tree_lowering_passes (node->decl);
432 node->lowered = true;
433}
434
28df663b 435/* DECL has been parsed. Take it, queue it, compile it at the whim of the
436 logic in effect. If NESTED is true, then our caller cannot stand to have
437 the garbage collector run at the moment. We would need to either create
438 a new GC context, or just not compile right now. */
ae01b312 439
440void
28df663b 441cgraph_finalize_function (tree decl, bool nested)
ae01b312 442{
443 struct cgraph_node *node = cgraph_node (decl);
444
c08871a9 445 if (node->local.finalized)
9b8fb23a 446 cgraph_reset_node (node);
28df663b 447
c08871a9 448 notice_global_symbol (decl);
ae01b312 449 node->decl = decl;
79bb87b4 450 node->local.finalized = true;
e27482aa 451 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
9d95b2b0 452 if (node->nested)
453 lower_nested_functions (decl);
454 gcc_assert (!node->nested);
ae01b312 455
2c0b522d 456 /* If not unit at a time, then we need to create the call graph
457 now, so that called functions can be queued and emitted now. */
2ff66ee0 458 if (!flag_unit_at_a_time)
19489abd 459 {
460 cgraph_analyze_function (node);
9e0baf4d 461 cgraph_decide_inlining_incrementally (node, false);
19489abd 462 }
2ff66ee0 463
2c0b522d 464 if (decide_is_function_needed (node, decl))
465 cgraph_mark_needed_node (node);
466
ecda6e51 467 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 468 level unit, we need to be conservative about possible entry points
469 there. */
62eec3b4 470 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 471 cgraph_mark_reachable_node (node);
472
28df663b 473 /* If not unit at a time, go ahead and emit everything we've found
474 to be reachable at this time. */
475 if (!nested)
dc721f36 476 {
477 if (!cgraph_assemble_pending_functions ())
478 ggc_collect ();
479 }
3d7bfc56 480
2c0b522d 481 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 482 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 483 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 484
b69eb0ff 485 /* Possibly warn about unused parameters. */
486 if (warn_unused_parameter)
487 do_warn_unused_parameter (decl);
ae01b312 488}
489
ae01b312 490/* Walk tree and record all calls. Called via walk_tree. */
491static tree
9bfec7c2 492record_reference (tree *tp, int *walk_subtrees, void *data)
ae01b312 493{
ec1e35b2 494 tree t = *tp;
495
496 switch (TREE_CODE (t))
ae01b312 497 {
ec1e35b2 498 case VAR_DECL:
499 /* ??? Really, we should mark this decl as *potentially* referenced
500 by this function and re-examine whether the decl is actually used
501 after rtl has been generated. */
c1dcd13c 502 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
caa6fdce 503 {
504 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
505 if (lang_hooks.callgraph.analyze_expr)
506 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
507 data);
508 }
ec1e35b2 509 break;
510
4e4ac74b 511 case FDESC_EXPR:
ec1e35b2 512 case ADDR_EXPR:
513 if (flag_unit_at_a_time)
514 {
515 /* Record dereferences to the functions. This makes the
516 functions reachable unconditionally. */
517 tree decl = TREE_OPERAND (*tp, 0);
518 if (TREE_CODE (decl) == FUNCTION_DECL)
519 cgraph_mark_needed_node (cgraph_node (decl));
520 }
521 break;
522
ec1e35b2 523 default:
524 /* Save some cycles by not walking types and declaration as we
525 won't find anything useful there anyway. */
ce45a448 526 if (IS_TYPE_OR_DECL_P (*tp))
ae01b312 527 {
ae01b312 528 *walk_subtrees = 0;
ec1e35b2 529 break;
ae01b312 530 }
ec1e35b2 531
532 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
dc24ddbd 533 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
ec1e35b2 534 break;
ae01b312 535 }
ec1e35b2 536
ae01b312 537 return NULL;
538}
539
b0cdf642 540/* Create cgraph edges for function calls inside BODY from NODE. */
ae01b312 541
edc6a4c0 542static void
b0cdf642 543cgraph_create_edges (struct cgraph_node *node, tree body)
ae01b312 544{
9bfec7c2 545 basic_block bb;
546
547 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
548 block_stmt_iterator bsi;
549 tree step;
c6224531 550 visited_nodes = pointer_set_create ();
e27482aa 551
9bfec7c2 552 /* Reach the trees by walking over the CFG, and note the
553 enclosing basic-blocks in the call edges. */
554 FOR_EACH_BB_FN (bb, this_cfun)
555 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
556 {
557 tree stmt = bsi_stmt (bsi);
558 tree call = get_call_expr_in (stmt);
559 tree decl;
560
561 if (call && (decl = get_callee_fndecl (call)))
e27482aa 562 {
9bfec7c2 563 cgraph_create_edge (node, cgraph_node (decl), stmt,
564 bb->count,
565 bb->loop_depth);
566 walk_tree (&TREE_OPERAND (call, 1),
567 record_reference, node, visited_nodes);
568 if (TREE_CODE (stmt) == MODIFY_EXPR)
569 walk_tree (&TREE_OPERAND (stmt, 0),
570 record_reference, node, visited_nodes);
e27482aa 571 }
9bfec7c2 572 else
573 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
574 }
575
127d7f21 576 /* Look for initializers of constant variables and private statics. */
577 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
578 step;
579 step = TREE_CHAIN (step))
9bfec7c2 580 {
127d7f21 581 tree decl = TREE_VALUE (step);
582 if (TREE_CODE (decl) == VAR_DECL
583 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
584 && flag_unit_at_a_time)
585 cgraph_varpool_finalize_decl (decl);
586 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
587 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
e27482aa 588 }
e27482aa 589
c6224531 590 pointer_set_destroy (visited_nodes);
25bb88de 591 visited_nodes = NULL;
ae01b312 592}
593
9e0baf4d 594/* Give initial reasons why inlining would fail. Those gets
595 either NULLified or usually overwritten by more precise reason
596 later. */
597static void
598initialize_inline_failed (struct cgraph_node *node)
599{
600 struct cgraph_edge *e;
601
602 for (e = node->callers; e; e = e->next_caller)
603 {
604 gcc_assert (!e->callee->global.inlined_to);
605 gcc_assert (e->inline_failed);
606 if (node->local.redefined_extern_inline)
607 e->inline_failed = N_("redefined extern inline functions are not "
608 "considered for inlining");
609 else if (!node->local.inlinable)
610 e->inline_failed = N_("function not inlinable");
611 else
612 e->inline_failed = N_("function not considered for inlining");
613 }
614}
615
616/* Rebuild call edges from current function after a passes not aware
617 of cgraph updating. */
618static void
619rebuild_cgraph_edges (void)
620{
621 basic_block bb;
622 struct cgraph_node *node = cgraph_node (current_function_decl);
623 block_stmt_iterator bsi;
624
625 cgraph_node_remove_callees (node);
626
627 node->count = ENTRY_BLOCK_PTR->count;
628
629 FOR_EACH_BB (bb)
630 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
631 {
632 tree stmt = bsi_stmt (bsi);
633 tree call = get_call_expr_in (stmt);
634 tree decl;
635
636 if (call && (decl = get_callee_fndecl (call)))
637 cgraph_create_edge (node, cgraph_node (decl), stmt,
638 bb->count,
639 bb->loop_depth);
640 }
641 initialize_inline_failed (node);
642 gcc_assert (!node->global.inlined_to);
643}
644
645struct tree_opt_pass pass_rebuild_cgraph_edges =
646{
647 NULL, /* name */
648 NULL, /* gate */
649 rebuild_cgraph_edges, /* execute */
650 NULL, /* sub */
651 NULL, /* next */
652 0, /* static_pass_number */
653 0, /* tv_id */
654 PROP_cfg, /* properties_required */
655 0, /* properties_provided */
656 0, /* properties_destroyed */
657 0, /* todo_flags_start */
658 0, /* todo_flags_finish */
659 0 /* letter */
660};
b0cdf642 661
662/* Verify cgraph nodes of given cgraph node. */
663void
664verify_cgraph_node (struct cgraph_node *node)
665{
666 struct cgraph_edge *e;
667 struct cgraph_node *main_clone;
e27482aa 668 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
669 basic_block this_block;
670 block_stmt_iterator bsi;
9bfec7c2 671 bool error_found = false;
b0cdf642 672
673 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 674 for (e = node->callees; e; e = e->next_callee)
675 if (e->aux)
676 {
0a81f5a0 677 error ("aux field set for edge %s->%s",
b0cdf642 678 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
679 error_found = true;
680 }
a2cb9b3b 681 if (node->count < 0)
682 {
683 error ("Execution count is negative");
684 error_found = true;
685 }
b0cdf642 686 for (e = node->callers; e; e = e->next_caller)
687 {
a2cb9b3b 688 if (e->count < 0)
689 {
690 error ("caller edge count is negative");
691 error_found = true;
692 }
b0cdf642 693 if (!e->inline_failed)
694 {
695 if (node->global.inlined_to
696 != (e->caller->global.inlined_to
697 ? e->caller->global.inlined_to : e->caller))
698 {
0a81f5a0 699 error ("inlined_to pointer is wrong");
b0cdf642 700 error_found = true;
701 }
702 if (node->callers->next_caller)
703 {
0a81f5a0 704 error ("multiple inline callers");
b0cdf642 705 error_found = true;
706 }
707 }
708 else
709 if (node->global.inlined_to)
710 {
0a81f5a0 711 error ("inlined_to pointer set for noninline callers");
b0cdf642 712 error_found = true;
713 }
714 }
715 if (!node->callers && node->global.inlined_to)
716 {
0a81f5a0 717 error ("inlined_to pointer is set but no predecesors found");
b0cdf642 718 error_found = true;
719 }
720 if (node->global.inlined_to == node)
721 {
0a81f5a0 722 error ("inlined_to pointer refers to itself");
b0cdf642 723 error_found = true;
724 }
725
726 for (main_clone = cgraph_node (node->decl); main_clone;
727 main_clone = main_clone->next_clone)
728 if (main_clone == node)
729 break;
730 if (!node)
731 {
0a81f5a0 732 error ("node not found in DECL_ASSEMBLER_NAME hash");
b0cdf642 733 error_found = true;
734 }
735
736 if (node->analyzed
737 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
738 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
739 {
e27482aa 740 if (this_cfun->cfg)
741 {
742 /* The nodes we're interested in are never shared, so walk
743 the tree ignoring duplicates. */
744 visited_nodes = pointer_set_create ();
745 /* Reach the trees by walking over the CFG, and note the
746 enclosing basic-blocks in the call edges. */
747 FOR_EACH_BB_FN (this_block, this_cfun)
748 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
9bfec7c2 749 {
750 tree stmt = bsi_stmt (bsi);
751 tree call = get_call_expr_in (stmt);
752 tree decl;
753 if (call && (decl = get_callee_fndecl (call)))
754 {
755 struct cgraph_edge *e = cgraph_edge (node, stmt);
756 if (e)
757 {
758 if (e->aux)
759 {
0a81f5a0 760 error ("shared call_stmt:");
9bfec7c2 761 debug_generic_stmt (stmt);
762 error_found = true;
763 }
469679ab 764 if (e->callee->decl != cgraph_node (decl)->decl
765 && e->inline_failed)
9bfec7c2 766 {
0a81f5a0 767 error ("edge points to wrong declaration:");
9bfec7c2 768 debug_tree (e->callee->decl);
769 fprintf (stderr," Instead of:");
770 debug_tree (decl);
771 }
772 e->aux = (void *)1;
773 }
774 else
775 {
0a81f5a0 776 error ("missing callgraph edge for call stmt:");
9bfec7c2 777 debug_generic_stmt (stmt);
778 error_found = true;
779 }
780 }
781 }
e27482aa 782 pointer_set_destroy (visited_nodes);
783 visited_nodes = NULL;
784 }
785 else
786 /* No CFG available?! */
787 gcc_unreachable ();
788
b0cdf642 789 for (e = node->callees; e; e = e->next_callee)
790 {
791 if (!e->aux)
792 {
0a81f5a0 793 error ("edge %s->%s has no corresponding call_stmt",
b0cdf642 794 cgraph_node_name (e->caller),
795 cgraph_node_name (e->callee));
9bfec7c2 796 debug_generic_stmt (e->call_stmt);
b0cdf642 797 error_found = true;
798 }
799 e->aux = 0;
800 }
801 }
802 if (error_found)
803 {
804 dump_cgraph_node (stderr, node);
0a81f5a0 805 internal_error ("verify_cgraph_node failed");
b0cdf642 806 }
807 timevar_pop (TV_CGRAPH_VERIFY);
808}
809
810/* Verify whole cgraph structure. */
811void
812verify_cgraph (void)
813{
814 struct cgraph_node *node;
815
8ec2a798 816 if (sorrycount || errorcount)
817 return;
818
b0cdf642 819 for (node = cgraph_nodes; node; node = node->next)
820 verify_cgraph_node (node);
821}
822
56af936e 823/* Output one variable, if necessary. Return whether we output it. */
824static bool
825cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
826{
827 tree decl = node->decl;
828
829 if (!TREE_ASM_WRITTEN (decl)
830 && !node->alias
831 && !DECL_EXTERNAL (decl)
832 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
833 {
834 assemble_variable (decl, 0, 1, 0);
835 /* Local static variables are never seen by check_global_declarations
836 so we need to output debug info by hand. */
837 if (DECL_CONTEXT (decl)
838 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
839 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
840 && errorcount == 0 && sorrycount == 0)
841 {
842 timevar_push (TV_SYMOUT);
843 (*debug_hooks->global_decl) (decl);
844 timevar_pop (TV_SYMOUT);
845 }
846 return true;
847 }
848
849 return false;
850}
c1dcd13c 851
852/* Output all variables enqueued to be assembled. */
853bool
854cgraph_varpool_assemble_pending_decls (void)
855{
856 bool changed = false;
857
858 if (errorcount || sorrycount)
859 return false;
860
861 /* EH might mark decls as needed during expansion. This should be safe since
862 we don't create references to new function, but it should not be used
863 elsewhere. */
864 cgraph_varpool_analyze_pending_decls ();
865
866 while (cgraph_varpool_nodes_queue)
867 {
c1dcd13c 868 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
869
870 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
56af936e 871 if (cgraph_varpool_assemble_decl (node))
872 changed = true;
c1dcd13c 873 node->next_needed = NULL;
874 }
875 return changed;
876}
877
56af936e 878/* Output all asm statements we have stored up to be output. */
879
880static void
881cgraph_output_pending_asms (void)
882{
883 struct cgraph_asm_node *can;
884
885 if (errorcount || sorrycount)
886 return;
887
888 for (can = cgraph_asm_nodes; can; can = can->next)
889 assemble_asm (can->asm_str);
890 cgraph_asm_nodes = NULL;
891}
892
0785e435 893/* Analyze the function scheduled to be output. */
1e8e9920 894void
0785e435 895cgraph_analyze_function (struct cgraph_node *node)
896{
897 tree decl = node->decl;
898
ec1e35b2 899 current_function_decl = decl;
e27482aa 900 push_cfun (DECL_STRUCT_FUNCTION (decl));
901 cgraph_lower_function (node);
0785e435 902
903 /* First kill forward declaration so reverse inlining works properly. */
e27482aa 904 cgraph_create_edges (node, decl);
0785e435 905
906 node->local.inlinable = tree_inlinable_function_p (decl);
e27482aa 907 node->local.self_insns = estimate_num_insns (decl);
0785e435 908 if (node->local.inlinable)
909 node->local.disregard_inline_limits
dc24ddbd 910 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
9e0baf4d 911 initialize_inline_failed (node);
99cf25d0 912 if (flag_really_no_inline && !node->local.disregard_inline_limits)
913 node->local.inlinable = 0;
0785e435 914 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
915 node->global.insns = node->local.self_insns;
0785e435 916
ec1e35b2 917 node->analyzed = true;
e27482aa 918 pop_cfun ();
c08871a9 919 current_function_decl = NULL;
0785e435 920}
921
ae01b312 922/* Analyze the whole compilation unit once it is parsed completely. */
923
void
cgraph_finalize_compilation_unit (void)
{
  struct cgraph_node *node;
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;

  finish_aliases_1 ();

  /* In non-unit-at-a-time mode functions were emitted as they were
     finalized; only flush the pending asm and function queues here.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_output_pending_asms ();
      cgraph_assemble_pending_functions ();
      return;
    }

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the next node off the worklist of needed functions.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_SAVED_TREE (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (DECL_SAVED_TREE (decl));

      cgraph_analyze_function (node);

      /* Analysis may have discovered new callees; mark them reachable so
	 they get queued and analyzed too.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      cgraph_varpool_analyze_pending_decls ();
    }

  /* Collect entry points to the unit.  */

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release bodies of functions that turned out to be unreachable, and
     sanity-check the analyzed/finalized invariants on the rest.  */
  for (node = cgraph_nodes; node != first_analyzed; node = node->next)
    {
      tree decl = node->decl;

      if (node->local.finalized && !DECL_SAVED_TREE (decl))
	cgraph_reset_node (node);

      if (!node->reachable && DECL_SAVED_TREE (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  /* Remember where this invocation stopped so a later call (intermodule
     mode) skips the already-analyzed nodes.  */
  first_analyzed = cgraph_nodes;
  ggc_collect ();
  timevar_pop (TV_CGRAPH);
}
ae01b312 1035/* Figure out what functions we want to assemble. */
1036
static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->output);

      /* Look for at least one caller that did not inline this function;
	 if every call was inlined there is no need for an offline copy
	 unless the function is needed for other reasons.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (DECL_SAVED_TREE (decl)
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->output = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
1081
ae01b312 1082/* Expand function specified by NODE. */
e6d2b2d8 1083
ae01b312 1084static void
d9d9733a 1085cgraph_expand_function (struct cgraph_node *node)
ae01b312 1086{
1087 tree decl = node->decl;
1088
b0cdf642 1089 /* We ought to not compile any inline clones. */
cc636d56 1090 gcc_assert (!node->global.inlined_to);
b0cdf642 1091
28df663b 1092 if (flag_unit_at_a_time)
1093 announce_function (decl);
961e3b13 1094
f8deefc1 1095 cgraph_lower_function (node);
1096
794da2bb 1097 /* Generate RTL for the body of DECL. */
dc24ddbd 1098 lang_hooks.callgraph.expand_function (decl);
961e3b13 1099
4ee9c684 1100 /* Make sure that BE didn't give up on compiling. */
1101 /* ??? Can happen with nested function of extern inline. */
cc636d56 1102 gcc_assert (TREE_ASM_WRITTEN (node->decl));
b0cdf642 1103
ae01b312 1104 current_function_decl = NULL;
8ec2a798 1105 if (!cgraph_preserve_function_body_p (node->decl))
4ee9c684 1106 {
1107 DECL_SAVED_TREE (node->decl) = NULL;
1108 DECL_STRUCT_FUNCTION (node->decl) = NULL;
4ee9c684 1109 DECL_INITIAL (node->decl) = error_mark_node;
7edd21a5 1110 /* Eliminate all call edges. This is important so the call_expr no longer
8ec2a798 1111 points to the dead function body. */
bb4c7a44 1112 cgraph_node_remove_callees (node);
4ee9c684 1113 }
e1be32b8 1114
1115 cgraph_function_flags_ready = true;
ae01b312 1116}
1117
b0cdf642 1118/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1119
1120bool
b0cdf642 1121cgraph_inline_p (struct cgraph_edge *e, const char **reason)
d7c6d889 1122{
b0cdf642 1123 *reason = e->inline_failed;
1124 return !e->inline_failed;
d7c6d889 1125}
b0cdf642 1126
acc70efa 1127
acc70efa 1128
d9d9733a 1129/* Expand all functions that must be output.
1130
d7c6d889 1131 Attempt to topologically sort the nodes so function is output when
1132 all called functions are already assembled to allow data to be
91c82c20 1133 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1134 between a function and its callees (later we may choose to use a more
d7c6d889 1135 sophisticated algorithm for function reordering; we will likely want
1136 to use subsections to make the output functions appear in top-down
1137 order). */
1138
static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order =
    xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
  int order_pos = 0, new_order_pos = 0;
  int i;

  /* Topologically sort the callgraph so callees precede callers.  */
  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->output)
      order[new_order_pos++] = order[i];

  /* Walk the postorder backwards so each function is emitted after the
     functions it calls.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->output)
	{
	  gcc_assert (node->reachable);
	  node->output = 0;
	  cgraph_expand_function (node);
	}
    }

  free (order);

  /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
     the expansion process.  Note that this queue may grow as its
     being processed, as the new functions may generate new ones.  */
  while (cgraph_expand_queue)
    {
      node = cgraph_expand_queue;
      cgraph_expand_queue = cgraph_expand_queue->next_needed;
      node->next_needed = NULL;
      node->output = 0;
      /* The queued function is considered lowered once it has a CFG.  */
      node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
      cgraph_expand_function (node);
    }
}
1183
56af936e 1184/* This is used to sort the node types by the cgraph order number. */
1185
struct cgraph_order_sort
{
  /* What kind of toplevel entity occupies this order slot; a slot left
     zero-initialized reads as ORDER_UNDEFINED.  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  /* The entity itself; which member is valid is selected by KIND.  */
  union
  {
    struct cgraph_node *f;		/* ORDER_FUNCTION.  */
    struct cgraph_varpool_node *v;	/* ORDER_VAR.  */
    struct cgraph_asm_node *a;		/* ORDER_ASM.  */
  } u;
};
1196
1197/* Output all functions, variables, and asm statements in the order
1198 according to their order fields, which is the order in which they
1199 appeared in the file. This implements -fno-toplevel-reorder. In
1200 this mode we may output functions and variables which don't really
1201 need to be output. */
1202
1203static void
1204cgraph_output_in_order (void)
1205{
1206 int max;
1207 size_t size;
1208 struct cgraph_order_sort *nodes;
1209 int i;
1210 struct cgraph_node *pf;
1211 struct cgraph_varpool_node *pv;
1212 struct cgraph_asm_node *pa;
1213
1214 max = cgraph_order;
1215 size = max * sizeof (struct cgraph_order_sort);
1216 nodes = (struct cgraph_order_sort *) alloca (size);
1217 memset (nodes, 0, size);
1218
1219 cgraph_varpool_analyze_pending_decls ();
1220
1221 for (pf = cgraph_nodes; pf; pf = pf->next)
1222 {
1223 if (pf->output)
1224 {
1225 i = pf->order;
1226 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1227 nodes[i].kind = ORDER_FUNCTION;
1228 nodes[i].u.f = pf;
1229 }
1230 }
1231
1232 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1233 {
1234 i = pv->order;
1235 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1236 nodes[i].kind = ORDER_VAR;
1237 nodes[i].u.v = pv;
1238 }
1239
1240 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1241 {
1242 i = pa->order;
1243 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1244 nodes[i].kind = ORDER_ASM;
1245 nodes[i].u.a = pa;
1246 }
1247 cgraph_asm_nodes = NULL;
1248
1249 for (i = 0; i < max; ++i)
1250 {
1251 switch (nodes[i].kind)
1252 {
1253 case ORDER_FUNCTION:
1254 nodes[i].u.f->output = 0;
1255 cgraph_expand_function (nodes[i].u.f);
1256 break;
1257
1258 case ORDER_VAR:
1259 cgraph_varpool_assemble_decl (nodes[i].u.v);
1260 break;
1261
1262 case ORDER_ASM:
1263 assemble_asm (nodes[i].u.a->asm_str);
1264 break;
1265
1266 case ORDER_UNDEFINED:
1267 break;
1268
1269 default:
1270 gcc_unreachable ();
1271 }
1272 }
1273}
1274
3f82b628 1275/* Mark visibility of all functions.
acc70efa 1276
1277 A local function is one whose calls can occur only in the current
1278 compilation unit and all its calls are explicit, so we can change
1279 its calling convention. We simply mark all static functions whose
3f82b628 1280 address is not taken as local.
1281
1282 We also change the TREE_PUBLIC flag of all declarations that are public
1283 in language point of view but we want to overwrite this default
1284 via visibilities for the backend point of view. */
d7c6d889 1285
static void
cgraph_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct cgraph_varpool_node *vnode;

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* A reachable function stays externally visible when it is COMDAT,
	 or when it is public and we are not compiling the whole program
	 (so other units may reference it).  */
      if (node->reachable
	  && (DECL_COMDAT (node->decl)
	      || (!flag_whole_program
		  && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
	node->local.externally_visible = true;
      /* Everything else that we analyzed can be made file-local.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
	  TREE_PUBLIC (node->decl) = 0;
	}
      /* A "local" function is one called only from this unit with all
	 calls explicit, so its calling convention may be changed.  */
      node->local.local = (!node->needed
			   && node->analyzed
			   && !DECL_EXTERNAL (node->decl)
			   && !node->local.externally_visible);
    }
  /* Apply the analogous rules to static variables.  */
  for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (vnode->needed
	  && !flag_whole_program
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
	vnode->externally_visible = 1;
      if (!vnode->externally_visible)
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
	  TREE_PUBLIC (vnode->decl) = 0;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }

  /* Because we have to be conservative on the boundaries of source
     level units, it is possible that we marked some functions in
     reachable just because they might be used later via external
     linkage, but after making them local they are really unreachable
     now.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
      fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
}
80a85d8a 1346
b0cdf642 1347/* Return true when function body of DECL still needs to be kept around
1348 for later re-use. */
1349bool
1350cgraph_preserve_function_body_p (tree decl)
1351{
1352 struct cgraph_node *node;
b0cdf642 1353 if (!cgraph_global_info_ready)
1354 return (DECL_INLINE (decl) && !flag_really_no_inline);
1355 /* Look if there is any clone around. */
1356 for (node = cgraph_node (decl); node; node = node->next_clone)
1357 if (node->global.inlined_to)
1358 return true;
1359 return false;
1360}
1361
/* Run the interprocedural (IPA) pass list over the whole callgraph.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the whole program rather than on a single
     function, so no function context is current.  */
  cfun = NULL;
  tree_register_cfg_hooks ();
  /* Give the passes a temporary bitmap obstack, released when the whole
     list has finished.  */
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1371
ae01b312 1372/* Perform simple optimizations based on callgraph. */
1373
1374void
d9d9733a 1375cgraph_optimize (void)
ae01b312 1376{
b0cdf642 1377#ifdef ENABLE_CHECKING
1378 verify_cgraph ();
1379#endif
2ff66ee0 1380 if (!flag_unit_at_a_time)
c1dcd13c 1381 {
56af936e 1382 cgraph_output_pending_asms ();
c1dcd13c 1383 cgraph_varpool_assemble_pending_decls ();
1384 return;
1385 }
e9f08e82 1386
1387 process_pending_assemble_externals ();
c1dcd13c 1388
1389 /* Frontend may output common variables after the unit has been finalized.
1390 It is safe to deal with them here as they are always zero initialized. */
1391 cgraph_varpool_analyze_pending_decls ();
e9f08e82 1392
f79b6507 1393 timevar_push (TV_CGRAPHOPT);
d7c6d889 1394 if (!quiet_flag)
1395 fprintf (stderr, "Performing intraprocedural optimizations\n");
e4200070 1396
3f82b628 1397 cgraph_function_and_variable_visibility ();
f79b6507 1398 if (cgraph_dump_file)
1399 {
e4200070 1400 fprintf (cgraph_dump_file, "Marked ");
f79b6507 1401 dump_cgraph (cgraph_dump_file);
1402 }
f1e2a033 1403 ipa_passes ();
e1be32b8 1404 /* This pass remove bodies of extern inline functions we never inlined.
1405 Do this later so other IPA passes see what is really going on. */
1406 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1407 cgraph_global_info_ready = true;
f79b6507 1408 if (cgraph_dump_file)
1409 {
e4200070 1410 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1411 dump_cgraph (cgraph_dump_file);
c1dcd13c 1412 dump_varpool (cgraph_dump_file);
f79b6507 1413 }
1414 timevar_pop (TV_CGRAPHOPT);
ae01b312 1415
d7c6d889 1416 /* Output everything. */
e4200070 1417 if (!quiet_flag)
1418 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1419#ifdef ENABLE_CHECKING
1420 verify_cgraph ();
1421#endif
56af936e 1422
acc70efa 1423 cgraph_mark_functions_to_output ();
c1dcd13c 1424
56af936e 1425 if (!flag_toplevel_reorder)
1426 cgraph_output_in_order ();
1427 else
1428 {
1429 cgraph_output_pending_asms ();
1430
1431 cgraph_expand_all_functions ();
1432 cgraph_varpool_remove_unreferenced_decls ();
1433
1434 cgraph_varpool_assemble_pending_decls ();
1435 }
c1dcd13c 1436
f79b6507 1437 if (cgraph_dump_file)
1438 {
e4200070 1439 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1440 dump_cgraph (cgraph_dump_file);
1441 }
b0cdf642 1442#ifdef ENABLE_CHECKING
1443 verify_cgraph ();
4ee9c684 1444 /* Double check that all inline clones are gone and that all
1445 function bodies have been released from memory. */
1446 if (flag_unit_at_a_time
0f9005dd 1447 && !dump_enabled_p (TDI_tree_all)
4ee9c684 1448 && !(sorrycount || errorcount))
1449 {
1450 struct cgraph_node *node;
1451 bool error_found = false;
1452
1453 for (node = cgraph_nodes; node; node = node->next)
1454 if (node->analyzed
1455 && (node->global.inlined_to
1456 || DECL_SAVED_TREE (node->decl)))
1457 {
1458 error_found = true;
1459 dump_cgraph_node (stderr, node);
1460 }
1461 if (error_found)
0a81f5a0 1462 internal_error ("nodes with no released memory found");
4ee9c684 1463 }
b0cdf642 1464#endif
ae01b312 1465}
b5530559 1466
1467/* Generate and emit a static constructor or destructor. WHICH must be
1468 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1469 GENERIC statements. */
1470
void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* Build a unique, file-scoped name for this constructor/destructor.  */
  sprintf (which_buf, "%c_%d", which, counter++);
  name = get_file_function_name_long (which_buf);

  /* Create a void fn (void) FUNCTION_DECL holding BODY.  */
  decl = build_decl (FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_IGNORED_P (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* When the target cannot emit ctor/dtor sections the function must be
     public so collect2 can find it.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* WHICH selects constructor ('I') or destructor ('D') semantics.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* ??? We will get called LATE in the compilation process.  */
  if (cgraph_global_info_ready)
    {
      tree_lowering_passes (decl);
      tree_rest_of_compilation (decl);
    }
  else
    cgraph_finalize_function (decl, 0);

  /* If the target supports it, register the function in the appropriate
     ctor/dtor table with the requested PRIORITY.  */
  if (targetm.have_ctors_dtors)
    {
      void (*fn) (rtx, int);

      if (which == 'I')
	fn = targetm.asm_out.constructor;
      else
	fn = targetm.asm_out.destructor;
      fn (XEXP (DECL_RTL (decl), 0), priority);
    }
}
121f3051 1541
/* Initialize the callgraph module: open the callgraph dump file (if the
   -fdump-ipa-cgraph style dump was requested).  */

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
b5d36404 1547
1548/* The edges representing the callers of the NEW_VERSION node were
1549 fixed by cgraph_function_versioning (), now the call_expr in their
1550 respective tree code should be updated to call the NEW_VERSION. */
1551
1552static void
1553update_call_expr (struct cgraph_node *new_version)
1554{
1555 struct cgraph_edge *e;
1556
1557 gcc_assert (new_version);
1558 for (e = new_version->callers; e; e = e->next_caller)
1559 /* Update the call expr on the edges
1560 to call the new version. */
1561 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1562}
1563
1564
1565/* Create a new cgraph node which is the new version of
1566 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1567 edges which should be redirected to point to
1568 NEW_VERSION. ALL the callees edges of OLD_VERSION
1569 are cloned to the new version node. Return the new
1570 version node. */
1571
1572static struct cgraph_node *
1573cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1574 tree new_decl, varray_type redirect_callers)
1575 {
1576 struct cgraph_node *new_version;
1577 struct cgraph_edge *e, *new_e;
1578 struct cgraph_edge *next_callee;
1579 unsigned i;
1580
1581 gcc_assert (old_version);
1582
1583 new_version = cgraph_node (new_decl);
1584
1585 new_version->analyzed = true;
1586 new_version->local = old_version->local;
1587 new_version->global = old_version->global;
1588 new_version->rtl = new_version->rtl;
1589 new_version->reachable = true;
1590 new_version->count = old_version->count;
1591
1592 /* Clone the old node callees. Recursive calls are
1593 also cloned. */
1594 for (e = old_version->callees;e; e=e->next_callee)
1595 {
1596 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1597 new_e->count = e->count;
1598 }
1599 /* Fix recursive calls.
1600 If OLD_VERSION has a recursive call after the
1601 previous edge cloning, the new version will have an edge
1602 pointing to the old version, which is wrong;
1603 Redirect it to point to the new version. */
1604 for (e = new_version->callees ; e; e = next_callee)
1605 {
1606 next_callee = e->next_callee;
1607 if (e->callee == old_version)
1608 cgraph_redirect_edge_callee (e, new_version);
1609
1610 if (!next_callee)
1611 break;
1612 }
1613 if (redirect_callers)
1614 for (i = 0; i < VARRAY_ACTIVE_SIZE (redirect_callers); i++)
1615 {
1616 e = VARRAY_GENERIC_PTR (redirect_callers, i);
1617 /* Redirect calls to the old version node
1618 to point to it's new version. */
1619 cgraph_redirect_edge_callee (e, new_version);
1620 }
1621
1622 return new_version;
1623 }
1624
1625 /* Perform function versioning.
1626 Function versioning includes copying of the tree and
1627 a callgraph update (creating a new cgraph node and updating
1628 its callees and callers).
1629
1630 REDIRECT_CALLERS varray includes the edges to be redirected
1631 to the new version.
1632
1633 TREE_MAP is a mapping of tree nodes we want to replace with
1634 new ones (according to results of prior analysis).
1635 OLD_VERSION_NODE is the node that is versioned.
1636 It returns the new version's cgraph node. */
1637
1638struct cgraph_node *
1639cgraph_function_versioning (struct cgraph_node *old_version_node,
1640 varray_type redirect_callers,
1641 varray_type tree_map)
1642{
1643 tree old_decl = old_version_node->decl;
1644 struct cgraph_node *new_version_node = NULL;
1645 tree new_decl;
1646
1647 if (!tree_versionable_function_p (old_decl))
1648 return NULL;
1649
1650 /* Make a new FUNCTION_DECL tree node for the
1651 new version. */
1652 new_decl = copy_node (old_decl);
1653
1654 /* Create the new version's call-graph node.
1655 and update the edges of the new node. */
1656 new_version_node =
1657 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1658 redirect_callers);
1659
1660 /* Copy the OLD_VERSION_NODE function tree to the new version. */
469679ab 1661 tree_function_versioning (old_decl, new_decl, tree_map, false);
b5d36404 1662 /* Update the call_expr on the edges to call the new version node. */
1663 update_call_expr (new_version_node);
1664
1665 /* Update the new version's properties.
1666 Make The new version visible only within this translation unit.
1667 ??? We cannot use COMDAT linkage because there is no
1668 ABI support for this. */
1669 DECL_EXTERNAL (new_version_node->decl) = 0;
1670 DECL_ONE_ONLY (new_version_node->decl) = 0;
1671 TREE_PUBLIC (new_version_node->decl) = 0;
1672 DECL_COMDAT (new_version_node->decl) = 0;
1673 new_version_node->local.externally_visible = 0;
1674 new_version_node->local.local = 1;
1675 new_version_node->lowered = true;
1676 return new_version_node;
1677}
469679ab 1678
1679/* Produce separate function body for inline clones so the offline copy can be
1680 modified without affecting them. */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* In non-unit-at-a-time we construct full fledged clone we never output to
     assembly file.  This clone is pointed out by inline_decl of original function
     and inlining infrastructure knows how to deal with this.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;

      first_clone = cgraph_clone_node (node, node->count, 0, false);
      first_clone->needed = 0;
      first_clone->reachable = 1;
      /* Recursively clone all bodies.  */
      for (e = first_clone->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  cgraph_clone_inlined_nodes (e, true, false);
    }
  else
    /* In unit-at-a-time mode reuse the first existing clone as the
       holder of the offline body.  */
    first_clone = node->next_clone;

  /* Detach FIRST_CLONE from NODE's clone list and give it its own
     declaration, registered in the callgraph hashtable.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  if (!flag_unit_at_a_time)
    node->inline_decl = first_clone->decl;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  /* The saved body is private to this translation unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* Repoint the remaining clones at the separated body's declaration.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
1731