d7c6d889 1/* Callgraph based intraprocedural optimizations.
dfbf3d71 2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
ae01b312 3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
67ce556b 19Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2002110-1301, USA. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well
 23 as a few basic intraprocedural optimizers.
 24
 25 The main purpose of this file is to act as an interface between the
 26 tree-based front ends and the back end (and middle end).
 27
 28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front end has parsed the whole body of a
 33 function and it is certain that neither the body nor the declaration will change.
34
 35 (There is one exception, needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_variable
38
7bd28bba 39 This function has the same behavior as the above but is used for static
b0cdf642 40 variables.
41
42 - cgraph_finalize_compilation_unit
43
 44 This function is called once the compilation unit is finalized and it will
 45 no longer change.
46
 47 In unit-at-a-time mode the call-graph construction and local function
 48 analysis take place here. Bodies of unreachable functions are released
 49 to conserve memory usage.
50
 51 ??? The compilation unit in this point of view should be the compilation
 52 unit as defined by the language - for instance the C front end allows multiple
 53 compilation units to be parsed at once and it should call this function each
 54 time parsing is done, so we save memory.
55
56 - cgraph_optimize
57
 58 In unit-at-a-time compilation the intraprocedural analysis takes
 59 place here. In particular, static functions whose address is never
 60 taken are marked as local. The back end can then use this information to
 61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
 67 of functions or variables that are known to be needed at a given stage
 68 of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
 73 When a function or variable is referenced in some hidden way (for instance
 74 via assembly code and marked by attribute "used"), the call-graph data structure
 75 must be updated accordingly by these functions.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
 81 callgraph and varpool nodes referenced by the expression as needed.
82
 83 ??? With tree-ssa, genericizing should take place here and we would avoid
 84 the need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
 88 This function is used to expand a function and pass it to the RTL back end.
 89 The front end should not make any assumptions about when this function can be
 90 called. In particular cgraph_assemble_pending_functions,
 91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
 92 cgraph_varpool_finalize_decl and cgraph_optimize can cause arbitrary
 93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
 97 - unit-at-a-time: In this mode the analysis of all functions is deferred
 98 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
 103 reachable. References to functions and variables are discovered too,
 104 and variables found to be needed are output to the assembly file. Via the
 105 mark_referenced call in assemble_variable, functions referenced by
106 static variables are noticed too.
107
ca67a72b 108 The intraprocedural information is produced and its existence is
b0cdf642 109 indicated by global_info_ready. Once this flag is set it is impossible
 110 to change a function from !reachable to reachable and thus
 111 assemble_variable no longer calls mark_referenced.
112
 113 Finally the call-graph is topologically sorted and all reachable functions
 114 that have not been completely inlined and are not external are output.
115
 116 ??? It is possible that a reference to a function or variable is optimized
 117 out. We cannot deal with this nicely because the topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize code so variables are output very last and only if they
 122 really have been referenced by the produced code, so we catch more cases
 123 where the reference has been optimized out.
124
125 - non-unit-at-a-time
126
 127 All functions and variables are output as early as possible to conserve
 128 memory consumption. This may or may not result in less memory being used,
 129 but it is still needed for some legacy code that relies on a particular
 130 ordering of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
 134 Functions are output early using a call to
 135 cgraph_assemble_pending_functions from cgraph_finalize_function. The
 136 decision on whether a function is needed is made more conservative, so
 137 uninlinable static functions are needed too. During the call-graph
 138 construction the edge destinations are not marked as reachable and we
edc6a4c0 139 rely completely on assemble_variable to mark them. */
121f3051 140
acc70efa 141
ae01b312 142#include "config.h"
143#include "system.h"
144#include "coretypes.h"
145#include "tm.h"
146#include "tree.h"
b5530559 147#include "rtl.h"
acc70efa 148#include "tree-flow.h"
ae01b312 149#include "tree-inline.h"
150#include "langhooks.h"
c6224531 151#include "pointer-set.h"
ae01b312 152#include "toplev.h"
153#include "flags.h"
154#include "ggc.h"
155#include "debug.h"
156#include "target.h"
157#include "cgraph.h"
80a85d8a 158#include "diagnostic.h"
f79b6507 159#include "timevar.h"
d7c6d889 160#include "params.h"
161#include "fibheap.h"
162#include "c-common.h"
611e5405 163#include "intl.h"
b69eb0ff 164#include "function.h"
b5d36404 165#include "ipa-prop.h"
acc70efa 166#include "tree-gimple.h"
f1e2a033 167#include "tree-pass.h"
c1dcd13c 168#include "output.h"
d7c6d889 169
a6868229 170static void cgraph_expand_all_functions (void);
d9d9733a 171static void cgraph_mark_functions_to_output (void);
172static void cgraph_expand_function (struct cgraph_node *);
9bfec7c2 173static tree record_reference (tree *, int *, void *);
f788fff2 174static void cgraph_output_pending_asms (void);
d7c6d889 175
9bfec7c2 176/* Records tree nodes seen in record_reference. Simply using
25bb88de 177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
9bfec7c2 179 record_reference itself. */
c6224531 180static struct pointer_set_t *visited_nodes;
25bb88de 181
121f3051 182static FILE *cgraph_dump_file;
183
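/* Illustrative sketch (not part of GCC): the call sequence a front end is
   expected to follow, as described in the overview comment at the top of
   this file.  FNDECLS and NFNDECLS are hypothetical stand-ins for whatever
   list of FUNCTION_DECLs the front end produced while parsing; the function
   is never called and exists only to make the interface concrete.  */

static void
example_frontend_driver (tree *fndecls, int nfndecls)
{
  int i;

  /* Hand each fully parsed function body over to the callgraph code.  */
  for (i = 0; i < nfndecls; i++)
    cgraph_finalize_function (fndecls[i], /*nested=*/false);

  /* Once the whole translation unit is parsed, analyze it; in unit-at-a-time
     mode this builds the callgraph and reclaims unreachable bodies.  */
  cgraph_finalize_compilation_unit ();

  /* Finally let the callgraph code decide visibility, perform inlining and
     expand everything that is needed.  */
  cgraph_optimize ();
}
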
2c0b522d 184/* Determine if function DECL is needed. That is, visible to something
185 either outside this translation unit, something magic in the system
 186 configury, or (if not doing unit-at-a-time) to something we haven't
187 seen yet. */
188
189static bool
190decide_is_function_needed (struct cgraph_node *node, tree decl)
191{
9d95b2b0 192 tree origin;
62eec3b4 193 if (MAIN_NAME_P (DECL_NAME (decl))
194 && TREE_PUBLIC (decl))
195 {
196 node->local.externally_visible = true;
197 return true;
198 }
4ee9c684 199
3f82b628 200 /* If the user told us it is used, then it must be so. */
62eec3b4 201 if (node->local.externally_visible
202 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
3f82b628 203 return true;
204
205 /* ??? If the assembler name is set by hand, it is possible to assemble
206 the name later after finalizing the function and the fact is noticed
207 in assemble_name then. This is arguably a bug. */
208 if (DECL_ASSEMBLER_NAME_SET_P (decl)
209 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
210 return true;
211
2c0b522d 212 /* If we decided it was needed before, but at the time we didn't have
213 the body of the function available, then it's still needed. We have
214 to go back and re-check its dependencies now. */
215 if (node->needed)
216 return true;
217
218 /* Externally visible functions must be output. The exception is
a0c938f0 219 COMDAT functions that must be output only when they are needed.
8baa9d15 220
 221 When not optimizing, also output the static functions (see
95da6220 222 PR24561), but don't do so for always_inline functions, functions
d3d410e1 223 declared inline and nested functions. These were optimized out
224 in the original implementation and it is unclear whether we want
554f2707 225 to change the behavior here. */
bba7ddf8 226 if (((TREE_PUBLIC (decl)
d3d410e1 227 || (!optimize && !node->local.disregard_inline_limits
228 && !DECL_DECLARED_INLINE_P (decl)
229 && !node->origin))
bba7ddf8 230 && !flag_whole_program)
62eec3b4 231 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 232 return true;
233
234 /* Constructors and destructors are reachable from the runtime by
235 some mechanism. */
236 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
237 return true;
238
2c0b522d 239 if (flag_unit_at_a_time)
240 return false;
241
242 /* If not doing unit at a time, then we'll only defer this function
 243 if it's marked for inlining. Otherwise we want to emit it now. */
244
245 /* "extern inline" functions are never output locally. */
246 if (DECL_EXTERNAL (decl))
247 return false;
4ee9c684 248 /* Nested functions of an extern inline function shall not be emitted
 249 unless we inlined the origin. */
9d95b2b0 250 for (origin = decl_function_context (decl); origin;
251 origin = decl_function_context (origin))
252 if (DECL_EXTERNAL (origin))
4ee9c684 253 return false;
f024691d 254 /* We want to emit COMDAT functions only when absolutely necessary. */
c08871a9 255 if (DECL_COMDAT (decl))
2c0b522d 256 return false;
257 if (!DECL_INLINE (decl)
258 || (!node->local.disregard_inline_limits
259 /* When declared inline, defer even the uninlinable functions.
e4200070 260 This allows them to be eliminated when unused. */
a0c938f0 261 && !DECL_DECLARED_INLINE_P (decl)
b30512dd 262 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
2c0b522d 263 return true;
264
265 return false;
266}
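
/* Example of the "hidden reference" case mentioned above: a static function
   that is reached only from inline assembly must carry the "used" attribute,
   otherwise nothing in the callgraph marks it as needed and its body would be
   reclaimed.  In user source this looks roughly like:

     static void handler (void) __attribute__ ((used));
     static void handler (void) { ... }
     asm (".long handler");

   decide_is_function_needed honors the attribute via the lookup_attribute
   check above.  */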
267
06b27565 268/* Walk the decls we marked as necessary and see if they reference new
269 variables or functions and add them into the worklists. */
c1dcd13c 270static bool
271cgraph_varpool_analyze_pending_decls (void)
272{
273 bool changed = false;
274 timevar_push (TV_CGRAPH);
275
276 while (cgraph_varpool_first_unanalyzed_node)
277 {
278 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
279
280 cgraph_varpool_first_unanalyzed_node->analyzed = true;
281
282 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
283
e3a1de9d 284 /* Compute the alignment early so function body expanders are
285 already informed about increased alignment. */
286 align_variable (decl, 0);
287
c1dcd13c 288 if (DECL_INITIAL (decl))
9bfec7c2 289 {
290 visited_nodes = pointer_set_create ();
a0c938f0 291 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
9bfec7c2 292 pointer_set_destroy (visited_nodes);
293 visited_nodes = NULL;
294 }
c1dcd13c 295 changed = true;
296 }
297 timevar_pop (TV_CGRAPH);
298 return changed;
299}
300
 301/* Optimization of function bodies might have rendered some variables
06b27565 302 unnecessary, so we want to avoid compiling them.
c1dcd13c 303
442e3cb9 304 This is done by pruning the queue and keeping only the variables that
06b27565 305 really appear needed (i.e. they are either externally visible or referenced
c1dcd13c 306 by a compiled function). Re-doing the reachability analysis on variables
307 brings back the remaining variables referenced by these. */
308static void
309cgraph_varpool_remove_unreferenced_decls (void)
310{
311 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
312
313 cgraph_varpool_reset_queue ();
314
315 if (errorcount || sorrycount)
316 return;
317
318 while (node)
319 {
320 tree decl = node->decl;
321 next = node->next_needed;
322 node->needed = 0;
323
324 if (node->finalized
325 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
326 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
327 || node->force_output
c131e678 328 || decide_is_variable_needed (node, decl)
a0c938f0 329 /* ??? Cgraph does not yet rule the world with an iron hand,
c131e678 330 and does not control the emission of debug information.
331 After a variable has its DECL_RTL set, we must assume that
332 it may be referenced by the debug information, and we can
333 no longer elide it. */
334 || DECL_RTL_SET_P (decl)))
c1dcd13c 335 cgraph_varpool_mark_needed_node (node);
336
337 node = next;
338 }
279cd732 339 /* Make sure we mark alias targets as used targets. */
340 finish_aliases_1 ();
c1dcd13c 341 cgraph_varpool_analyze_pending_decls ();
342}
acc70efa 343
acc70efa 344
c08871a9 345/* When not doing unit-at-a-time, output all functions enqueued.
 346 Return true when such functions were found. */
050e11c9 347
348bool
c08871a9 349cgraph_assemble_pending_functions (void)
350{
351 bool output = false;
352
353 if (flag_unit_at_a_time)
354 return false;
355
f788fff2 356 cgraph_output_pending_asms ();
357
c08871a9 358 while (cgraph_nodes_queue)
359 {
360 struct cgraph_node *n = cgraph_nodes_queue;
361
362 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 363 n->next_needed = NULL;
1d09f0e6 364 if (!n->global.inlined_to
365 && !n->alias
366 && !DECL_EXTERNAL (n->decl))
050e11c9 367 {
368 cgraph_expand_function (n);
369 output = true;
370 }
c08871a9 371 }
050e11c9 372
773c5ba7 373 /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
 374 the expansion process. Note that this queue may grow as it is
 375 being processed, as the new functions may generate new ones. */
376 while (cgraph_expand_queue)
377 {
378 struct cgraph_node *n = cgraph_expand_queue;
379 cgraph_expand_queue = cgraph_expand_queue->next_needed;
380 n->next_needed = NULL;
381 cgraph_finalize_function (n->decl, false);
382 output = true;
383 }
384
c08871a9 385 return output;
386}
773c5ba7 387
388
9b8fb23a 389/* As a GCC extension we allow redefinition of a function. The
 390 semantics when the two bodies differ are not well defined.
 391 We replace the old body with the new body, so in unit-at-a-time mode
 392 we always use the new body, while in normal mode we may end up with the
 393 old body inlined into some functions and the new body expanded and
 394 inlined in others.
 395
 396 ??? It may make more sense to use one body for inlining and the other
 397 body for expanding the function, but this is difficult to do. */
398
399static void
400cgraph_reset_node (struct cgraph_node *node)
401{
402 /* If node->output is set, then this is a unit-at-a-time compilation
403 and we have already begun whole-unit analysis. This is *not*
404 testing for whether we've already emitted the function. That
a0c938f0 405 case can be sort-of legitimately seen with real function
9b8fb23a 406 redefinition errors. I would argue that the front end should
407 never present us with such a case, but don't enforce that for now. */
408 gcc_assert (!node->output);
409
410 /* Reset our data structures so we can analyze the function again. */
411 memset (&node->local, 0, sizeof (node->local));
412 memset (&node->global, 0, sizeof (node->global));
413 memset (&node->rtl, 0, sizeof (node->rtl));
414 node->analyzed = false;
415 node->local.redefined_extern_inline = true;
416 node->local.finalized = false;
417
418 if (!flag_unit_at_a_time)
419 {
420 struct cgraph_node *n;
421
422 for (n = cgraph_nodes; n; n = n->next)
423 if (n->global.inlined_to == node)
424 cgraph_remove_node (n);
425 }
426
427 cgraph_node_remove_callees (node);
428
 429 /* We may need to re-queue the node for assembling in case
 430 we already processed it and ignored it as not needed. */
431 if (node->reachable && !flag_unit_at_a_time)
432 {
433 struct cgraph_node *n;
434
435 for (n = cgraph_nodes_queue; n; n = n->next_needed)
436 if (n == node)
437 break;
438 if (!n)
439 node->reachable = 0;
440 }
441}
c08871a9 442
1e8e9920 443static void
444cgraph_lower_function (struct cgraph_node *node)
445{
446 if (node->lowered)
447 return;
448 tree_lowering_passes (node->decl);
449 node->lowered = true;
450}
451
28df663b 452/* DECL has been parsed. Take it, queue it, compile it at the whim of the
453 logic in effect. If NESTED is true, then our caller cannot stand to have
454 the garbage collector run at the moment. We would need to either create
455 a new GC context, or just not compile right now. */
ae01b312 456
457void
28df663b 458cgraph_finalize_function (tree decl, bool nested)
ae01b312 459{
460 struct cgraph_node *node = cgraph_node (decl);
461
c08871a9 462 if (node->local.finalized)
9b8fb23a 463 cgraph_reset_node (node);
28df663b 464
c08871a9 465 notice_global_symbol (decl);
ae01b312 466 node->decl = decl;
79bb87b4 467 node->local.finalized = true;
e27482aa 468 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
9d95b2b0 469 if (node->nested)
470 lower_nested_functions (decl);
471 gcc_assert (!node->nested);
ae01b312 472
2c0b522d 473 /* If not unit at a time, then we need to create the call graph
474 now, so that called functions can be queued and emitted now. */
2ff66ee0 475 if (!flag_unit_at_a_time)
19489abd 476 {
477 cgraph_analyze_function (node);
9e0baf4d 478 cgraph_decide_inlining_incrementally (node, false);
19489abd 479 }
2ff66ee0 480
2c0b522d 481 if (decide_is_function_needed (node, decl))
482 cgraph_mark_needed_node (node);
483
ecda6e51 484 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 485 level unit, we need to be conservative about possible entry points
486 there. */
62eec3b4 487 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 488 cgraph_mark_reachable_node (node);
489
28df663b 490 /* If not unit at a time, go ahead and emit everything we've found
491 to be reachable at this time. */
492 if (!nested)
dc721f36 493 {
494 if (!cgraph_assemble_pending_functions ())
495 ggc_collect ();
496 }
3d7bfc56 497
2c0b522d 498 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 499 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 500 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 501
b69eb0ff 502 /* Possibly warn about unused parameters. */
503 if (warn_unused_parameter)
504 do_warn_unused_parameter (decl);
ae01b312 505}
506
ae01b312 507/* Walk tree and record all calls. Called via walk_tree. */
508static tree
9bfec7c2 509record_reference (tree *tp, int *walk_subtrees, void *data)
ae01b312 510{
ec1e35b2 511 tree t = *tp;
512
513 switch (TREE_CODE (t))
ae01b312 514 {
ec1e35b2 515 case VAR_DECL:
516 /* ??? Really, we should mark this decl as *potentially* referenced
517 by this function and re-examine whether the decl is actually used
518 after rtl has been generated. */
c1dcd13c 519 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
caa6fdce 520 {
521 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
522 if (lang_hooks.callgraph.analyze_expr)
a0c938f0 523 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
caa6fdce 524 data);
525 }
ec1e35b2 526 break;
527
4e4ac74b 528 case FDESC_EXPR:
ec1e35b2 529 case ADDR_EXPR:
530 if (flag_unit_at_a_time)
531 {
532 /* Record dereferences to the functions. This makes the
533 functions reachable unconditionally. */
534 tree decl = TREE_OPERAND (*tp, 0);
535 if (TREE_CODE (decl) == FUNCTION_DECL)
536 cgraph_mark_needed_node (cgraph_node (decl));
537 }
538 break;
539
ec1e35b2 540 default:
 541 /* Save some cycles by not walking types and declarations as we
542 won't find anything useful there anyway. */
ce45a448 543 if (IS_TYPE_OR_DECL_P (*tp))
ae01b312 544 {
ae01b312 545 *walk_subtrees = 0;
ec1e35b2 546 break;
ae01b312 547 }
ec1e35b2 548
549 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
dc24ddbd 550 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
ec1e35b2 551 break;
ae01b312 552 }
ec1e35b2 553
ae01b312 554 return NULL;
555}
556
b0cdf642 557/* Create cgraph edges for function calls inside BODY from NODE. */
ae01b312 558
edc6a4c0 559static void
b0cdf642 560cgraph_create_edges (struct cgraph_node *node, tree body)
ae01b312 561{
9bfec7c2 562 basic_block bb;
563
564 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
565 block_stmt_iterator bsi;
566 tree step;
c6224531 567 visited_nodes = pointer_set_create ();
e27482aa 568
a0c938f0 569 /* Reach the trees by walking over the CFG, and note the
9bfec7c2 570 enclosing basic-blocks in the call edges. */
571 FOR_EACH_BB_FN (bb, this_cfun)
572 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
573 {
574 tree stmt = bsi_stmt (bsi);
575 tree call = get_call_expr_in (stmt);
576 tree decl;
577
578 if (call && (decl = get_callee_fndecl (call)))
e27482aa 579 {
9bfec7c2 580 cgraph_create_edge (node, cgraph_node (decl), stmt,
581 bb->count,
582 bb->loop_depth);
583 walk_tree (&TREE_OPERAND (call, 1),
584 record_reference, node, visited_nodes);
585 if (TREE_CODE (stmt) == MODIFY_EXPR)
586 walk_tree (&TREE_OPERAND (stmt, 0),
587 record_reference, node, visited_nodes);
e27482aa 588 }
a0c938f0 589 else
9bfec7c2 590 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
591 }
592
127d7f21 593 /* Look for initializers of constant variables and private statics. */
594 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
595 step;
596 step = TREE_CHAIN (step))
9bfec7c2 597 {
127d7f21 598 tree decl = TREE_VALUE (step);
599 if (TREE_CODE (decl) == VAR_DECL
600 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
601 && flag_unit_at_a_time)
602 cgraph_varpool_finalize_decl (decl);
603 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
604 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
e27482aa 605 }
a0c938f0 606
c6224531 607 pointer_set_destroy (visited_nodes);
25bb88de 608 visited_nodes = NULL;
ae01b312 609}
610
9e0baf4d 611/* Give the initial reasons why inlining would fail. These get
 612 either NULLified or, usually, overwritten by a more precise reason
 613 later. */
614static void
615initialize_inline_failed (struct cgraph_node *node)
616{
617 struct cgraph_edge *e;
618
619 for (e = node->callers; e; e = e->next_caller)
620 {
621 gcc_assert (!e->callee->global.inlined_to);
622 gcc_assert (e->inline_failed);
623 if (node->local.redefined_extern_inline)
624 e->inline_failed = N_("redefined extern inline functions are not "
625 "considered for inlining");
626 else if (!node->local.inlinable)
627 e->inline_failed = N_("function not inlinable");
628 else
629 e->inline_failed = N_("function not considered for inlining");
630 }
631}
632
 633/* Rebuild call edges from the current function after passes that are not
 634 aware of cgraph updating. */
2a1990e9 635static unsigned int
9e0baf4d 636rebuild_cgraph_edges (void)
637{
638 basic_block bb;
639 struct cgraph_node *node = cgraph_node (current_function_decl);
640 block_stmt_iterator bsi;
641
642 cgraph_node_remove_callees (node);
643
644 node->count = ENTRY_BLOCK_PTR->count;
645
646 FOR_EACH_BB (bb)
647 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
648 {
649 tree stmt = bsi_stmt (bsi);
650 tree call = get_call_expr_in (stmt);
651 tree decl;
652
653 if (call && (decl = get_callee_fndecl (call)))
654 cgraph_create_edge (node, cgraph_node (decl), stmt,
655 bb->count,
656 bb->loop_depth);
657 }
658 initialize_inline_failed (node);
659 gcc_assert (!node->global.inlined_to);
2a1990e9 660 return 0;
9e0baf4d 661}
662
663struct tree_opt_pass pass_rebuild_cgraph_edges =
664{
665 NULL, /* name */
666 NULL, /* gate */
667 rebuild_cgraph_edges, /* execute */
668 NULL, /* sub */
669 NULL, /* next */
670 0, /* static_pass_number */
671 0, /* tv_id */
672 PROP_cfg, /* properties_required */
673 0, /* properties_provided */
674 0, /* properties_destroyed */
675 0, /* todo_flags_start */
676 0, /* todo_flags_finish */
677 0 /* letter */
678};
b0cdf642 679
 680/* Verify the consistency of the given cgraph node. */
681void
682verify_cgraph_node (struct cgraph_node *node)
683{
684 struct cgraph_edge *e;
685 struct cgraph_node *main_clone;
e27482aa 686 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
687 basic_block this_block;
688 block_stmt_iterator bsi;
9bfec7c2 689 bool error_found = false;
b0cdf642 690
691 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 692 for (e = node->callees; e; e = e->next_callee)
693 if (e->aux)
694 {
0a81f5a0 695 error ("aux field set for edge %s->%s",
b0cdf642 696 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
697 error_found = true;
698 }
a2cb9b3b 699 if (node->count < 0)
700 {
701 error ("Execution count is negative");
702 error_found = true;
703 }
b0cdf642 704 for (e = node->callers; e; e = e->next_caller)
705 {
a2cb9b3b 706 if (e->count < 0)
707 {
708 error ("caller edge count is negative");
709 error_found = true;
710 }
b0cdf642 711 if (!e->inline_failed)
712 {
713 if (node->global.inlined_to
714 != (e->caller->global.inlined_to
715 ? e->caller->global.inlined_to : e->caller))
716 {
0a81f5a0 717 error ("inlined_to pointer is wrong");
b0cdf642 718 error_found = true;
719 }
720 if (node->callers->next_caller)
721 {
0a81f5a0 722 error ("multiple inline callers");
b0cdf642 723 error_found = true;
724 }
725 }
726 else
727 if (node->global.inlined_to)
728 {
0a81f5a0 729 error ("inlined_to pointer set for noninline callers");
b0cdf642 730 error_found = true;
731 }
732 }
733 if (!node->callers && node->global.inlined_to)
734 {
5cd75817 735 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 736 error_found = true;
737 }
738 if (node->global.inlined_to == node)
739 {
0a81f5a0 740 error ("inlined_to pointer refers to itself");
b0cdf642 741 error_found = true;
742 }
743
744 for (main_clone = cgraph_node (node->decl); main_clone;
745 main_clone = main_clone->next_clone)
746 if (main_clone == node)
747 break;
0f6439b9 748 if (!cgraph_node (node->decl))
b0cdf642 749 {
0f6439b9 750 error ("node not found in cgraph_hash");
b0cdf642 751 error_found = true;
752 }
a0c938f0 753
b0cdf642 754 if (node->analyzed
755 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
756 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
757 {
e27482aa 758 if (this_cfun->cfg)
759 {
760 /* The nodes we're interested in are never shared, so walk
761 the tree ignoring duplicates. */
762 visited_nodes = pointer_set_create ();
763 /* Reach the trees by walking over the CFG, and note the
764 enclosing basic-blocks in the call edges. */
765 FOR_EACH_BB_FN (this_block, this_cfun)
766 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
9bfec7c2 767 {
768 tree stmt = bsi_stmt (bsi);
769 tree call = get_call_expr_in (stmt);
770 tree decl;
771 if (call && (decl = get_callee_fndecl (call)))
772 {
773 struct cgraph_edge *e = cgraph_edge (node, stmt);
774 if (e)
775 {
776 if (e->aux)
777 {
0a81f5a0 778 error ("shared call_stmt:");
9bfec7c2 779 debug_generic_stmt (stmt);
780 error_found = true;
781 }
469679ab 782 if (e->callee->decl != cgraph_node (decl)->decl
783 && e->inline_failed)
9bfec7c2 784 {
0a81f5a0 785 error ("edge points to wrong declaration:");
9bfec7c2 786 debug_tree (e->callee->decl);
787 fprintf (stderr," Instead of:");
788 debug_tree (decl);
789 }
790 e->aux = (void *)1;
791 }
792 else
793 {
0a81f5a0 794 error ("missing callgraph edge for call stmt:");
9bfec7c2 795 debug_generic_stmt (stmt);
796 error_found = true;
797 }
798 }
799 }
e27482aa 800 pointer_set_destroy (visited_nodes);
801 visited_nodes = NULL;
802 }
803 else
804 /* No CFG available?! */
805 gcc_unreachable ();
806
b0cdf642 807 for (e = node->callees; e; e = e->next_callee)
808 {
809 if (!e->aux)
810 {
0a81f5a0 811 error ("edge %s->%s has no corresponding call_stmt",
b0cdf642 812 cgraph_node_name (e->caller),
813 cgraph_node_name (e->callee));
9bfec7c2 814 debug_generic_stmt (e->call_stmt);
b0cdf642 815 error_found = true;
816 }
817 e->aux = 0;
818 }
819 }
820 if (error_found)
821 {
822 dump_cgraph_node (stderr, node);
0a81f5a0 823 internal_error ("verify_cgraph_node failed");
b0cdf642 824 }
825 timevar_pop (TV_CGRAPH_VERIFY);
826}
827
828/* Verify whole cgraph structure. */
829void
830verify_cgraph (void)
831{
832 struct cgraph_node *node;
833
8ec2a798 834 if (sorrycount || errorcount)
835 return;
836
b0cdf642 837 for (node = cgraph_nodes; node; node = node->next)
838 verify_cgraph_node (node);
839}
840
56af936e 841/* Output one variable, if necessary. Return whether we output it. */
842static bool
843cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
844{
845 tree decl = node->decl;
846
847 if (!TREE_ASM_WRITTEN (decl)
848 && !node->alias
849 && !DECL_EXTERNAL (decl)
850 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
851 {
852 assemble_variable (decl, 0, 1, 0);
853 /* Local static variables are never seen by check_global_declarations
854 so we need to output debug info by hand. */
a0c938f0 855 if (DECL_CONTEXT (decl)
56af936e 856 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
857 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
858 && errorcount == 0 && sorrycount == 0)
859 {
860 timevar_push (TV_SYMOUT);
861 (*debug_hooks->global_decl) (decl);
862 timevar_pop (TV_SYMOUT);
863 }
864 return true;
865 }
866
867 return false;
868}
c1dcd13c 869
870/* Output all variables enqueued to be assembled. */
871bool
872cgraph_varpool_assemble_pending_decls (void)
873{
874 bool changed = false;
875
876 if (errorcount || sorrycount)
877 return false;
a0c938f0 878
c1dcd13c 879 /* EH might mark decls as needed during expansion. This should be safe since
 880 we don't create references to new functions, but it should not be used
881 elsewhere. */
882 cgraph_varpool_analyze_pending_decls ();
883
884 while (cgraph_varpool_nodes_queue)
885 {
c1dcd13c 886 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
887
888 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
56af936e 889 if (cgraph_varpool_assemble_decl (node))
890 changed = true;
c1dcd13c 891 node->next_needed = NULL;
892 }
893 return changed;
894}
895
56af936e 896/* Output all asm statements we have stored up to be output. */
897
898static void
899cgraph_output_pending_asms (void)
900{
901 struct cgraph_asm_node *can;
902
903 if (errorcount || sorrycount)
904 return;
905
906 for (can = cgraph_asm_nodes; can; can = can->next)
907 assemble_asm (can->asm_str);
908 cgraph_asm_nodes = NULL;
909}
910
0785e435 911/* Analyze the function scheduled to be output. */
1e8e9920 912void
0785e435 913cgraph_analyze_function (struct cgraph_node *node)
914{
915 tree decl = node->decl;
916
ec1e35b2 917 current_function_decl = decl;
e27482aa 918 push_cfun (DECL_STRUCT_FUNCTION (decl));
919 cgraph_lower_function (node);
0785e435 920
921 /* First kill forward declaration so reverse inlining works properly. */
e27482aa 922 cgraph_create_edges (node, decl);
0785e435 923
924 node->local.inlinable = tree_inlinable_function_p (decl);
e27482aa 925 node->local.self_insns = estimate_num_insns (decl);
0785e435 926 if (node->local.inlinable)
927 node->local.disregard_inline_limits
dc24ddbd 928 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
9e0baf4d 929 initialize_inline_failed (node);
99cf25d0 930 if (flag_really_no_inline && !node->local.disregard_inline_limits)
931 node->local.inlinable = 0;
0785e435 932 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
933 node->global.insns = node->local.self_insns;
0785e435 934
ec1e35b2 935 node->analyzed = true;
e27482aa 936 pop_cfun ();
c08871a9 937 current_function_decl = NULL;
0785e435 938}
939
ae01b312 940/* Analyze the whole compilation unit once it is parsed completely. */
941
942void
d9d9733a 943cgraph_finalize_compilation_unit (void)
ae01b312 944{
945 struct cgraph_node *node;
c1dcd13c 946 /* Keep track of already processed nodes when called multiple times for
06b27565 947 intermodule optimization. */
c1dcd13c 948 static struct cgraph_node *first_analyzed;
ae01b312 949
d7401838 950 finish_aliases_1 ();
951
2ff66ee0 952 if (!flag_unit_at_a_time)
c08871a9 953 {
56af936e 954 cgraph_output_pending_asms ();
c08871a9 955 cgraph_assemble_pending_functions ();
956 return;
957 }
2ff66ee0 958
d7c6d889 959 if (!quiet_flag)
c1dcd13c 960 {
961 fprintf (stderr, "\nAnalyzing compilation unit");
962 fflush (stderr);
963 }
229dcfae 964
f79b6507 965 timevar_push (TV_CGRAPH);
c1dcd13c 966 cgraph_varpool_analyze_pending_decls ();
f79b6507 967 if (cgraph_dump_file)
ae01b312 968 {
e4200070 969 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 970 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
3d7bfc56 971 if (node->needed && DECL_SAVED_TREE (node->decl))
f79b6507 972 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
973 fprintf (cgraph_dump_file, "\n");
ae01b312 974 }
975
e6d2b2d8 976 /* Propagate reachability flag and lower representation of all reachable
977 functions. In the future, lowering will introduce new functions and
978 new entry points on the way (by template instantiation and virtual
979 method table generation for instance). */
3d7bfc56 980 while (cgraph_nodes_queue)
ae01b312 981 {
0785e435 982 struct cgraph_edge *edge;
3d7bfc56 983 tree decl = cgraph_nodes_queue->decl;
984
985 node = cgraph_nodes_queue;
d87976fb 986 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 987 node->next_needed = NULL;
ae01b312 988
638531ad 989 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 990 weak alias attribute to kill its body. See
638531ad 991 gcc.c-torture/compile/20011119-1.c */
992 if (!DECL_SAVED_TREE (decl))
9b8fb23a 993 {
994 cgraph_reset_node (node);
995 continue;
996 }
638531ad 997
cc636d56 998 gcc_assert (!node->analyzed && node->reachable);
999 gcc_assert (DECL_SAVED_TREE (decl));
ae01b312 1000
0785e435 1001 cgraph_analyze_function (node);
2c0b522d 1002
ae01b312 1003 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1004 if (!edge->callee->reachable)
2c0b522d 1005 cgraph_mark_reachable_node (edge->callee);
1006
c1dcd13c 1007 cgraph_varpool_analyze_pending_decls ();
ae01b312 1008 }
2c0b522d 1009
3d7bfc56 1010 /* Collect entry points to the unit. */
1011
f79b6507 1012 if (cgraph_dump_file)
3d7bfc56 1013 {
e4200070 1014 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1015 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
3d7bfc56 1016 if (node->needed && DECL_SAVED_TREE (node->decl))
f79b6507 1017 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1018 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1019 dump_cgraph (cgraph_dump_file);
3d7bfc56 1020 }
e6d2b2d8 1021
f79b6507 1022 if (cgraph_dump_file)
1023 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1024
c1dcd13c 1025 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
ae01b312 1026 {
1027 tree decl = node->decl;
1028
9b8fb23a 1029 if (node->local.finalized && !DECL_SAVED_TREE (decl))
a0c938f0 1030 cgraph_reset_node (node);
9b8fb23a 1031
ae01b312 1032 if (!node->reachable && DECL_SAVED_TREE (decl))
1033 {
f79b6507 1034 if (cgraph_dump_file)
1035 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1036 cgraph_remove_node (node);
9b8fb23a 1037 continue;
ae01b312 1038 }
bc5cab3b 1039 else
1040 node->next_needed = NULL;
9b8fb23a 1041 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1042 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1043 }
f79b6507 1044 if (cgraph_dump_file)
e4200070 1045 {
1046 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1047 dump_cgraph (cgraph_dump_file);
1048 }
c1dcd13c 1049 first_analyzed = cgraph_nodes;
ae01b312 1050 ggc_collect ();
f79b6507 1051 timevar_pop (TV_CGRAPH);
ae01b312 1052}
ae01b312 1053/* Figure out what functions we want to assemble. */
1054
1055static void
d9d9733a 1056cgraph_mark_functions_to_output (void)
ae01b312 1057{
1058 struct cgraph_node *node;
1059
ae01b312 1060 for (node = cgraph_nodes; node; node = node->next)
1061 {
1062 tree decl = node->decl;
d7c6d889 1063 struct cgraph_edge *e;
a0c938f0 1064
cc636d56 1065 gcc_assert (!node->output);
d7c6d889 1066
1067 for (e = node->callers; e; e = e->next_caller)
611e5405 1068 if (e->inline_failed)
d7c6d889 1069 break;
ae01b312 1070
e6d2b2d8 1071 /* We need to output all local functions that are used and not
1072 always inlined, as well as those that are reachable from
1073 outside the current compilation unit. */
ae01b312 1074 if (DECL_SAVED_TREE (decl)
b0cdf642 1075 && !node->global.inlined_to
ae01b312 1076 && (node->needed
d7c6d889 1077 || (e && node->reachable))
4ee9c684 1078 && !TREE_ASM_WRITTEN (decl)
ae01b312 1079 && !DECL_EXTERNAL (decl))
1080 node->output = 1;
cc636d56 1081 else
9cee7c3f 1082 {
1083 /* We should've reclaimed all functions that are not needed. */
1084#ifdef ENABLE_CHECKING
1085 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1086 && !DECL_EXTERNAL (decl))
1087 {
1088 dump_cgraph_node (stderr, node);
1089 internal_error ("failed to reclaim unneeded function");
1090 }
1091#endif
1092 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1093 || DECL_EXTERNAL (decl));
1094
1095 }
a0c938f0 1096
961e3b13 1097 }
1098}
1099
ae01b312 1100/* Expand function specified by NODE. */
e6d2b2d8 1101
ae01b312 1102static void
d9d9733a 1103cgraph_expand_function (struct cgraph_node *node)
ae01b312 1104{
1105 tree decl = node->decl;
1106
b0cdf642 1107 /* We ought to not compile any inline clones. */
cc636d56 1108 gcc_assert (!node->global.inlined_to);
b0cdf642 1109
28df663b 1110 if (flag_unit_at_a_time)
1111 announce_function (decl);
961e3b13 1112
f8deefc1 1113 cgraph_lower_function (node);
1114
794da2bb 1115 /* Generate RTL for the body of DECL. */
dc24ddbd 1116 lang_hooks.callgraph.expand_function (decl);
961e3b13 1117
4ee9c684 1118 /* Make sure that BE didn't give up on compiling. */
1119 /* ??? Can happen with nested function of extern inline. */
cc636d56 1120 gcc_assert (TREE_ASM_WRITTEN (node->decl));
b0cdf642 1121
ae01b312 1122 current_function_decl = NULL;
8ec2a798 1123 if (!cgraph_preserve_function_body_p (node->decl))
4ee9c684 1124 {
1125 DECL_SAVED_TREE (node->decl) = NULL;
1126 DECL_STRUCT_FUNCTION (node->decl) = NULL;
4ee9c684 1127 DECL_INITIAL (node->decl) = error_mark_node;
7edd21a5 1128 /* Eliminate all call edges. This is important so the call_expr no longer
8ec2a798 1129 points to the dead function body. */
bb4c7a44 1130 cgraph_node_remove_callees (node);
4ee9c684 1131 }
e1be32b8 1132
1133 cgraph_function_flags_ready = true;
ae01b312 1134}
1135
b0cdf642 1136/* Return true when the call on edge E is going to be inlined; set *REASON to the reason inlining failed, if it did. */
d7c6d889 1137
1138bool
b0cdf642 1139cgraph_inline_p (struct cgraph_edge *e, const char **reason)
d7c6d889 1140{
b0cdf642 1141 *reason = e->inline_failed;
1142 return !e->inline_failed;
d7c6d889 1143}
b0cdf642 1144
acc70efa 1145
acc70efa 1146
d9d9733a 1147/* Expand all functions that must be output.
1148
d7c6d889 1149 Attempt to topologically sort the nodes so a function is output when
 1150 all the functions it calls are already assembled, to allow data to be
91c82c20 1151 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1152 between a function and its callees (later we may choose to use a more
d7c6d889 1153 sophisticated algorithm for function reordering; we will likely want
1154 to use subsections to make the output functions appear in top-down
1155 order). */
1156
1157static void
a6868229 1158cgraph_expand_all_functions (void)
d7c6d889 1159{
1160 struct cgraph_node *node;
4c36ffe6 1161 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
b0cdf642 1162 int order_pos = 0, new_order_pos = 0;
d7c6d889 1163 int i;
1164
d7c6d889 1165 order_pos = cgraph_postorder (order);
cc636d56 1166 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1167
7bd28bba 1168 /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1169 optimization, so we must be sure not to reference them. */
1170 for (i = 0; i < order_pos; i++)
1171 if (order[i]->output)
1172 order[new_order_pos++] = order[i];
1173
1174 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1175 {
1176 node = order[i];
1177 if (node->output)
1178 {
cc636d56 1179 gcc_assert (node->reachable);
d7c6d889 1180 node->output = 0;
1181 cgraph_expand_function (node);
1182 }
1183 }
773c5ba7 1184
d7c6d889 1185 free (order);
773c5ba7 1186
 1187 /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
 1188 the expansion process. Note that this queue may grow as it is
 1189 being processed, as the new functions may generate new ones. */
1190 while (cgraph_expand_queue)
1191 {
1192 node = cgraph_expand_queue;
1193 cgraph_expand_queue = cgraph_expand_queue->next_needed;
1194 node->next_needed = NULL;
1195 node->output = 0;
1196 node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1197 cgraph_expand_function (node);
1198 }
d7c6d889 1199}
1200
56af936e 1201/* This is used to sort the node types by the cgraph order number. */
1202
1203struct cgraph_order_sort
1204{
1205 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1206 union
1207 {
1208 struct cgraph_node *f;
1209 struct cgraph_varpool_node *v;
1210 struct cgraph_asm_node *a;
1211 } u;
1212};
1213
1214/* Output all functions, variables, and asm statements in the order
1215 according to their order fields, which is the order in which they
1216 appeared in the file. This implements -fno-toplevel-reorder. In
1217 this mode we may output functions and variables which don't really
1218 need to be output. */
1219
1220static void
1221cgraph_output_in_order (void)
1222{
1223 int max;
1224 size_t size;
1225 struct cgraph_order_sort *nodes;
1226 int i;
1227 struct cgraph_node *pf;
1228 struct cgraph_varpool_node *pv;
1229 struct cgraph_asm_node *pa;
1230
1231 max = cgraph_order;
1232 size = max * sizeof (struct cgraph_order_sort);
1233 nodes = (struct cgraph_order_sort *) alloca (size);
1234 memset (nodes, 0, size);
1235
1236 cgraph_varpool_analyze_pending_decls ();
1237
1238 for (pf = cgraph_nodes; pf; pf = pf->next)
1239 {
1240 if (pf->output)
1241 {
1242 i = pf->order;
1243 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1244 nodes[i].kind = ORDER_FUNCTION;
1245 nodes[i].u.f = pf;
1246 }
1247 }
1248
1249 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1250 {
1251 i = pv->order;
1252 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1253 nodes[i].kind = ORDER_VAR;
1254 nodes[i].u.v = pv;
1255 }
1256
1257 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1258 {
1259 i = pa->order;
1260 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1261 nodes[i].kind = ORDER_ASM;
1262 nodes[i].u.a = pa;
1263 }
56af936e 1264
1265 for (i = 0; i < max; ++i)
1266 {
1267 switch (nodes[i].kind)
1268 {
1269 case ORDER_FUNCTION:
1270 nodes[i].u.f->output = 0;
1271 cgraph_expand_function (nodes[i].u.f);
1272 break;
1273
1274 case ORDER_VAR:
1275 cgraph_varpool_assemble_decl (nodes[i].u.v);
1276 break;
1277
1278 case ORDER_ASM:
1279 assemble_asm (nodes[i].u.a->asm_str);
1280 break;
1281
1282 case ORDER_UNDEFINED:
1283 break;
1284
1285 default:
1286 gcc_unreachable ();
1287 }
1288 }
4b4ea2db 1289
1290 cgraph_asm_nodes = NULL;
56af936e 1291}
1292
3f82b628 1293/* Mark visibility of all functions and variables.
a0c938f0 1294
acc70efa 1295 A local function is one whose calls can occur only in the current
1296 compilation unit and all its calls are explicit, so we can change
1297 its calling convention. We simply mark all static functions whose
3f82b628 1298 address is not taken as local.
1299
 1300 We also change the TREE_PUBLIC flag of all declarations that are public
 1301 from the language point of view but whose default we want to override
 1302 via visibilities for the back end's point of view. */
d7c6d889 1303
1304static void
3f82b628 1305cgraph_function_and_variable_visibility (void)
d7c6d889 1306{
1307 struct cgraph_node *node;
3f82b628 1308 struct cgraph_varpool_node *vnode;
d7c6d889 1309
d7c6d889 1310 for (node = cgraph_nodes; node; node = node->next)
1311 {
3f82b628 1312 if (node->reachable
1313 && (DECL_COMDAT (node->decl)
c8b8ac00 1314 || (!flag_whole_program
1315 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
62eec3b4 1316 node->local.externally_visible = true;
1317 if (!node->local.externally_visible && node->analyzed
1318 && !DECL_EXTERNAL (node->decl))
1319 {
1320 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1321 TREE_PUBLIC (node->decl) = 0;
1322 }
d7c6d889 1323 node->local.local = (!node->needed
3f82b628 1324 && node->analyzed
e6ad3175 1325 && !DECL_EXTERNAL (node->decl)
8b4f617c 1326 && !node->local.externally_visible);
3f82b628 1327 }
1328 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1329 {
1330 if (vnode->needed
c8b8ac00 1331 && !flag_whole_program
3f82b628 1332 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1333 vnode->externally_visible = 1;
62eec3b4 1334 if (!vnode->externally_visible)
1335 {
1336 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1337 TREE_PUBLIC (vnode->decl) = 0;
1338 }
3f82b628 1339 gcc_assert (TREE_STATIC (vnode->decl));
d7c6d889 1340 }
acc70efa 1341
3f82b628 1342 /* Because we have to be conservative on the boundaries of source
 1343 level units, it is possible that we marked some functions as
 1344 reachable just because they might be used later via external
 1345 linkage, but after making them local they turn out to be
 1346 really unreachable. */
1347 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1348
d7c6d889 1349 if (cgraph_dump_file)
acc70efa 1350 {
1351 fprintf (cgraph_dump_file, "\nMarking local functions:");
1352 for (node = cgraph_nodes; node; node = node->next)
1353 if (node->local.local)
1354 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1355 fprintf (cgraph_dump_file, "\n\n");
3f82b628 1356 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1357 for (node = cgraph_nodes; node; node = node->next)
1358 if (node->local.externally_visible)
1359 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1360 fprintf (cgraph_dump_file, "\n\n");
58f1b650 1361 }
3f82b628 1362 cgraph_function_flags_ready = true;
acc70efa 1363}
80a85d8a 1364
b0cdf642 1365/* Return true when the function body of DECL still needs to be kept around
1366 for later re-use. */
1367bool
1368cgraph_preserve_function_body_p (tree decl)
1369{
1370 struct cgraph_node *node;
b0cdf642 1371 if (!cgraph_global_info_ready)
1372 return (DECL_INLINE (decl) && !flag_really_no_inline);
1373 /* Look if there is any clone around. */
1374 for (node = cgraph_node (decl); node; node = node->next_clone)
1375 if (node->global.inlined_to)
1376 return true;
1377 return false;
1378}
1379
77fce4cd 1380static void
1381ipa_passes (void)
1382{
1383 cfun = NULL;
1384 tree_register_cfg_hooks ();
1385 bitmap_obstack_initialize (NULL);
1386 execute_ipa_pass_list (all_ipa_passes);
1387 bitmap_obstack_release (NULL);
1388}
1389
ae01b312 1390/* Perform simple optimizations based on callgraph. */
1391
1392void
d9d9733a 1393cgraph_optimize (void)
ae01b312 1394{
b0cdf642 1395#ifdef ENABLE_CHECKING
1396 verify_cgraph ();
1397#endif
2ff66ee0 1398 if (!flag_unit_at_a_time)
c1dcd13c 1399 {
56af936e 1400 cgraph_output_pending_asms ();
c1dcd13c 1401 cgraph_varpool_assemble_pending_decls ();
1402 return;
1403 }
e9f08e82 1404
1405 process_pending_assemble_externals ();
a0c938f0 1406
c1dcd13c 1407 /* The front end may output common variables after the unit has been finalized.
1408 It is safe to deal with them here as they are always zero initialized. */
1409 cgraph_varpool_analyze_pending_decls ();
e9f08e82 1410
f79b6507 1411 timevar_push (TV_CGRAPHOPT);
d7c6d889 1412 if (!quiet_flag)
1413 fprintf (stderr, "Performing intraprocedural optimizations\n");
e4200070 1414
3f82b628 1415 cgraph_function_and_variable_visibility ();
f79b6507 1416 if (cgraph_dump_file)
1417 {
e4200070 1418 fprintf (cgraph_dump_file, "Marked ");
f79b6507 1419 dump_cgraph (cgraph_dump_file);
1420 }
be4d0974 1421
1422 /* Don't run the IPA passes if there was any error or sorry messages. */
1423 if (errorcount == 0 && sorrycount == 0)
1424 ipa_passes ();
1425
e1be32b8 1426 /* This pass removes the bodies of extern inline functions we never inlined.
1427 Do this later so other IPA passes see what is really going on. */
1428 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1429 cgraph_global_info_ready = true;
f79b6507 1430 if (cgraph_dump_file)
1431 {
e4200070 1432 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1433 dump_cgraph (cgraph_dump_file);
c1dcd13c 1434 dump_varpool (cgraph_dump_file);
f79b6507 1435 }
1436 timevar_pop (TV_CGRAPHOPT);
ae01b312 1437
d7c6d889 1438 /* Output everything. */
e4200070 1439 if (!quiet_flag)
1440 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1441#ifdef ENABLE_CHECKING
1442 verify_cgraph ();
1443#endif
56af936e 1444
acc70efa 1445 cgraph_mark_functions_to_output ();
c1dcd13c 1446
56af936e 1447 if (!flag_toplevel_reorder)
1448 cgraph_output_in_order ();
1449 else
1450 {
1451 cgraph_output_pending_asms ();
1452
1453 cgraph_expand_all_functions ();
1454 cgraph_varpool_remove_unreferenced_decls ();
1455
1456 cgraph_varpool_assemble_pending_decls ();
1457 }
c1dcd13c 1458
f79b6507 1459 if (cgraph_dump_file)
1460 {
e4200070 1461 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1462 dump_cgraph (cgraph_dump_file);
1463 }
b0cdf642 1464#ifdef ENABLE_CHECKING
1465 verify_cgraph ();
4ee9c684 1466 /* Double check that all inline clones are gone and that all
1467 function bodies have been released from memory. */
1468 if (flag_unit_at_a_time
0f9005dd 1469 && !dump_enabled_p (TDI_tree_all)
4ee9c684 1470 && !(sorrycount || errorcount))
1471 {
1472 struct cgraph_node *node;
1473 bool error_found = false;
1474
1475 for (node = cgraph_nodes; node; node = node->next)
1476 if (node->analyzed
1477 && (node->global.inlined_to
a0c938f0 1478 || DECL_SAVED_TREE (node->decl)))
4ee9c684 1479 {
1480 error_found = true;
1481 dump_cgraph_node (stderr, node);
a0c938f0 1482 }
4ee9c684 1483 if (error_found)
0a81f5a0 1484 internal_error ("nodes with no released memory found");
4ee9c684 1485 }
b0cdf642 1486#endif
ae01b312 1487}
b5530559 1488
1489/* Generate and emit a static constructor or destructor. WHICH must be
a0c938f0 1490 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
b5530559 1491 GENERIC statements. */
1492
1493void
c5344b58 1494cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 1495{
1496 static int counter = 0;
1497 char which_buf[16];
540edea7 1498 tree decl, name, resdecl;
b5530559 1499
1500 sprintf (which_buf, "%c_%d", which, counter++);
1501 name = get_file_function_name_long (which_buf);
1502
1503 decl = build_decl (FUNCTION_DECL, name,
1504 build_function_type (void_type_node, void_list_node));
1505 current_function_decl = decl;
1506
540edea7 1507 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1508 DECL_ARTIFICIAL (resdecl) = 1;
1509 DECL_IGNORED_P (resdecl) = 1;
1510 DECL_RESULT (decl) = resdecl;
1511
b5530559 1512 allocate_struct_function (decl);
1513
1514 TREE_STATIC (decl) = 1;
1515 TREE_USED (decl) = 1;
1516 DECL_ARTIFICIAL (decl) = 1;
1517 DECL_IGNORED_P (decl) = 1;
1518 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1519 DECL_SAVED_TREE (decl) = body;
1520 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1521 DECL_UNINLINABLE (decl) = 1;
1522
1523 DECL_INITIAL (decl) = make_node (BLOCK);
1524 TREE_USED (DECL_INITIAL (decl)) = 1;
1525
1526 DECL_SOURCE_LOCATION (decl) = input_location;
1527 cfun->function_end_locus = input_location;
1528
cc636d56 1529 switch (which)
1530 {
1531 case 'I':
1532 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1533 break;
1534 case 'D':
1535 DECL_STATIC_DESTRUCTOR (decl) = 1;
1536 break;
1537 default:
1538 gcc_unreachable ();
1539 }
b5530559 1540
1541 gimplify_function_tree (decl);
1542
1543 /* ??? We will get called LATE in the compilation process. */
1544 if (cgraph_global_info_ready)
e27482aa 1545 {
1546 tree_lowering_passes (decl);
1547 tree_rest_of_compilation (decl);
1548 }
b5530559 1549 else
1550 cgraph_finalize_function (decl, 0);
a0c938f0 1551
b5530559 1552 if (targetm.have_ctors_dtors)
1553 {
1554 void (*fn) (rtx, int);
1555
1556 if (which == 'I')
1557 fn = targetm.asm_out.constructor;
1558 else
1559 fn = targetm.asm_out.destructor;
c5344b58 1560 fn (XEXP (DECL_RTL (decl), 0), priority);
b5530559 1561 }
1562}
121f3051 1563
1564void
1565init_cgraph (void)
1566{
1567 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1568}
b5d36404 1569
a0c938f0 1570/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1571 fixed by cgraph_function_versioning (); now the call_expr in their
 1572 respective call statements should be updated to call NEW_VERSION. */
1573
1574static void
1575update_call_expr (struct cgraph_node *new_version)
1576{
1577 struct cgraph_edge *e;
1578
1579 gcc_assert (new_version);
1580 for (e = new_version->callers; e; e = e->next_caller)
1581 /* Update the call expr on the edges
1582 to call the new version. */
1583 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1584}
1585
1586
1587/* Create a new cgraph node which is the new version of
 1588 OLD_VERSION node. REDIRECT_CALLERS holds the caller
 1589 edges which should be redirected to point to
 1590 NEW_VERSION. All the callee edges of OLD_VERSION
1591 are cloned to the new version node. Return the new
1592 version node. */
1593
1594static struct cgraph_node *
1595cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1596 tree new_decl,
1597 VEC(cgraph_edge_p,heap) *redirect_callers)
b5d36404 1598 {
1599 struct cgraph_node *new_version;
1600 struct cgraph_edge *e, *new_e;
1601 struct cgraph_edge *next_callee;
1602 unsigned i;
1603
1604 gcc_assert (old_version);
a0c938f0 1605
b5d36404 1606 new_version = cgraph_node (new_decl);
1607
1608 new_version->analyzed = true;
1609 new_version->local = old_version->local;
1610 new_version->global = old_version->global;
1611 new_version->rtl = new_version->rtl;
1612 new_version->reachable = true;
1613 new_version->count = old_version->count;
1614
1615 /* Clone the old node callees. Recursive calls are
1616 also cloned. */
1617 for (e = old_version->callees;e; e=e->next_callee)
1618 {
1619 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1620 new_e->count = e->count;
1621 }
1622 /* Fix recursive calls.
1623 If OLD_VERSION has a recursive call after the
1624 previous edge cloning, the new version will have an edge
1625 pointing to the old version, which is wrong;
1626 Redirect it to point to the new version. */
1627 for (e = new_version->callees ; e; e = next_callee)
1628 {
1629 next_callee = e->next_callee;
1630 if (e->callee == old_version)
1631 cgraph_redirect_edge_callee (e, new_version);
a0c938f0 1632
b5d36404 1633 if (!next_callee)
1634 break;
1635 }
4460a647 1636 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1637 {
1638 /* Redirect calls to the old version node to point to its new
1639 version. */
1640 cgraph_redirect_edge_callee (e, new_version);
1641 }
b5d36404 1642
1643 return new_version;
1644 }
1645
1646 /* Perform function versioning.
a0c938f0 1647 Function versioning includes copying of the tree and
b5d36404 1648 a callgraph update (creating a new cgraph node and updating
1649 its callees and callers).
1650
1651 REDIRECT_CALLERS varray includes the edges to be redirected
1652 to the new version.
1653
1654 TREE_MAP is a mapping of tree nodes we want to replace with
1655 new ones (according to results of prior analysis).
1656 OLD_VERSION_NODE is the node that is versioned.
1657 It returns the new version's cgraph node. */
1658
1659struct cgraph_node *
1660cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 1661 VEC(cgraph_edge_p,heap) *redirect_callers,
b5d36404 1662 varray_type tree_map)
1663{
1664 tree old_decl = old_version_node->decl;
1665 struct cgraph_node *new_version_node = NULL;
1666 tree new_decl;
1667
1668 if (!tree_versionable_function_p (old_decl))
1669 return NULL;
1670
1671 /* Make a new FUNCTION_DECL tree node for the
1672 new version. */
1673 new_decl = copy_node (old_decl);
1674
1675 /* Create the new version's call-graph node.
1676 and update the edges of the new node. */
1677 new_version_node =
1678 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1679 redirect_callers);
1680
1681 /* Copy the OLD_VERSION_NODE function tree to the new version. */
469679ab 1682 tree_function_versioning (old_decl, new_decl, tree_map, false);
b5d36404 1683 /* Update the call_expr on the edges to call the new version node. */
1684 update_call_expr (new_version_node);
1685
a0c938f0 1686 /* Update the new version's properties.
b5d36404 1687 Make The new version visible only within this translation unit.
a0c938f0 1688 ??? We cannot use COMDAT linkage because there is no
b5d36404 1689 ABI support for this. */
1690 DECL_EXTERNAL (new_version_node->decl) = 0;
1691 DECL_ONE_ONLY (new_version_node->decl) = 0;
1692 TREE_PUBLIC (new_version_node->decl) = 0;
1693 DECL_COMDAT (new_version_node->decl) = 0;
1694 new_version_node->local.externally_visible = 0;
1695 new_version_node->local.local = 1;
1696 new_version_node->lowered = true;
1697 return new_version_node;
1698}
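
/* Illustrative sketch (not part of GCC): produce a new version of NODE and
   redirect all of its existing callers to the copy, in the way an IPA
   specialization pass might.  It assumes a NULL TREE_MAP (no tree
   replacements) is acceptable; the function is never called here.  */

static struct cgraph_node *
example_version_with_all_callers (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct cgraph_node *new_node;
  VEC(cgraph_edge_p,heap) *redirect_callers
    = VEC_alloc (cgraph_edge_p, heap, 4);

  /* Collect every caller edge; cgraph_function_versioning will redirect
     exactly these edges to the new version.  */
  for (e = node->callers; e; e = e->next_caller)
    VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);

  new_node = cgraph_function_versioning (node, redirect_callers, NULL);
  VEC_free (cgraph_edge_p, heap, redirect_callers);
  return new_node;
}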
469679ab 1699
 1700/* Produce a separate function body for inline clones so the offline copy can be
1701 modified without affecting them. */
1702struct cgraph_node *
1703save_inline_function_body (struct cgraph_node *node)
1704{
1705 struct cgraph_node *first_clone;
1706
1707 gcc_assert (node == cgraph_node (node->decl));
1708
1709 cgraph_lower_function (node);
1710
 1711 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
334ec2d8 1712 output to the assembly file. This clone is pointed to by inline_decl of the
469679ab 1713 original function, and the inlining infrastructure knows how to deal with it. */
1714 if (!flag_unit_at_a_time)
1715 {
1716 struct cgraph_edge *e;
1717
1718 first_clone = cgraph_clone_node (node, node->count, 0, false);
1719 first_clone->needed = 0;
1720 first_clone->reachable = 1;
1721 /* Recursively clone all bodies. */
1722 for (e = first_clone->callees; e; e = e->next_callee)
1723 if (!e->inline_failed)
1724 cgraph_clone_inlined_nodes (e, true, false);
1725 }
1726 else
1727 first_clone = node->next_clone;
1728
1729 first_clone->decl = copy_node (node->decl);
1730 node->next_clone = NULL;
1731 if (!flag_unit_at_a_time)
1732 node->inline_decl = first_clone->decl;
1733 first_clone->prev_clone = NULL;
1734 cgraph_insert_node_to_hashtable (first_clone);
1735 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1736
1737 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1738 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1739
1740 DECL_EXTERNAL (first_clone->decl) = 0;
1741 DECL_ONE_ONLY (first_clone->decl) = 0;
1742 TREE_PUBLIC (first_clone->decl) = 0;
1743 DECL_COMDAT (first_clone->decl) = 0;
1744
1745 for (node = first_clone->next_clone; node; node = node->next_clone)
1746 node->decl = first_clone->decl;
1747#ifdef ENABLE_CHECKING
1748 verify_cgraph_node (first_clone);
1749#endif
1750 return first_clone;
1751}
1752