d7c6d889 1/* Callgraph based intraprocedural optimizations.
dfbf3d71 2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
ae01b312 3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
67ce556b 19Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2002110-1301, USA. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well as
23 a few basic intraprocedural optimizers.
24
25 The main scope of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of a function
33 and it is certain that neither the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_decl
38
7bd28bba 39 This function has the same behavior as the above but is used for static
b0cdf642 40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once the compilation unit is finalized and it will
45 no longer change.
46
47 In unit-at-a-time mode the call-graph construction and local function
48 analysis take place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be the compilation
52 unit as defined by the language - for instance the C front end allows multiple
53 compilation units to be parsed at once and it should call this function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In unit-at-a-time compilation the intraprocedural analysis takes
59 place here. In particular, static functions whose address is never
60 taken are marked as local. The back end can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_decls
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at a given stage
68 of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When a function or variable is referenced in some hidden way (for instance
74 via assembly code and marked by attribute "used"), the call-graph data structure
75 must be updated accordingly by these functions.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
81 callgraph and varpool nodes referenced by them as needed.
82
83 ??? On tree-ssa, genericizing should take place here and we would avoid the
84 need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
88 This function is used to expand a function and pass it to the RTL back end.
89 The front end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_decls, cgraph_finalize_function,
92 cgraph_varpool_finalize_decl and cgraph_optimize can cause any
93 previously finalized function to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode analysis of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too
104 and variables found to be needed are output to the assembly file. Via the
105 mark_referenced call in assemble_variable, functions referenced by
106 static variables are noticed too.
107
ca67a72b 108 The intra-procedural information is produced and its existence is
b0cdf642 109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change a function from !reachable to reachable and thus
111 assemble_variable no longer calls mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that have not been completely inlined and are not external are output.
115
116 ??? It is possible that a reference to a function or variable is optimized
117 out. We cannot deal with this nicely because topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize code so variables are output very last and only if they
122 really have been referenced by the produced code, so we catch more cases
123 where the reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used but
129 it is still needed for some legacy code that relies on a particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using a call to
135 cgraph_assemble_pending_functions from cgraph_finalize_function. The
136 decision on whether a function is needed is made more conservative so
137 uninlinable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and we
edc6a4c0 139 rely completely upon assemble_variable to mark them. */
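/* Illustrative sketch, not part of the original sources: a front end driving
   this interface in unit-at-a-time mode finalizes each function and variable
   as it finishes parsing it and then hands the whole unit over.  Roughly
   (the iteration is pseudo-code; real front ends keep their own lists of
   parsed declarations):

       for each FUNCTION_DECL fndecl the front end finished parsing:
	 cgraph_finalize_function (fndecl, false);
       for each file-scope VAR_DECL decl:
	 cgraph_varpool_finalize_decl (decl);

       once the translation unit is fully parsed:
	 cgraph_finalize_compilation_unit ();
	 cgraph_optimize ();  */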
121f3051 140
acc70efa 141
ae01b312 142#include "config.h"
143#include "system.h"
144#include "coretypes.h"
145#include "tm.h"
146#include "tree.h"
b5530559 147#include "rtl.h"
acc70efa 148#include "tree-flow.h"
ae01b312 149#include "tree-inline.h"
150#include "langhooks.h"
c6224531 151#include "pointer-set.h"
ae01b312 152#include "toplev.h"
153#include "flags.h"
154#include "ggc.h"
155#include "debug.h"
156#include "target.h"
157#include "cgraph.h"
80a85d8a 158#include "diagnostic.h"
f79b6507 159#include "timevar.h"
d7c6d889 160#include "params.h"
161#include "fibheap.h"
162#include "c-common.h"
611e5405 163#include "intl.h"
b69eb0ff 164#include "function.h"
b5d36404 165#include "ipa-prop.h"
acc70efa 166#include "tree-gimple.h"
f1e2a033 167#include "tree-pass.h"
c1dcd13c 168#include "output.h"
d7c6d889 169
a6868229 170static void cgraph_expand_all_functions (void);
d9d9733a 171static void cgraph_mark_functions_to_output (void);
172static void cgraph_expand_function (struct cgraph_node *);
9bfec7c2 173static tree record_reference (tree *, int *, void *);
f788fff2 174static void cgraph_output_pending_asms (void);
d7c6d889 175
9bfec7c2 176/* Records tree nodes seen in record_reference. Simply using
25bb88de 177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
9bfec7c2 179 record_reference itself. */
c6224531 180static struct pointer_set_t *visited_nodes;
25bb88de 181
121f3051 182static FILE *cgraph_dump_file;
183
2c0b522d 184/* Determine if function DECL is needed. That is, visible to something
185 either outside this translation unit, something magic in the system
186 configury, or (if not doing unit-at-a-time) to something we haven't
187 seen yet. */
188
189static bool
190decide_is_function_needed (struct cgraph_node *node, tree decl)
191{
9d95b2b0 192 tree origin;
62eec3b4 193 if (MAIN_NAME_P (DECL_NAME (decl))
194 && TREE_PUBLIC (decl))
195 {
196 node->local.externally_visible = true;
197 return true;
198 }
4ee9c684 199
3f82b628 200 /* If the user told us it is used, then it must be so. */
05806473 201 if (node->local.externally_visible)
202 return true;
203
204 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
3f82b628 205 return true;
206
207 /* ??? If the assembler name is set by hand, it is possible to assemble
208 the name later after finalizing the function and the fact is noticed
209 in assemble_name then. This is arguably a bug. */
210 if (DECL_ASSEMBLER_NAME_SET_P (decl)
211 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
212 return true;
213
2c0b522d 214 /* If we decided it was needed before, but at the time we didn't have
215 the body of the function available, then it's still needed. We have
216 to go back and re-check its dependencies now. */
217 if (node->needed)
218 return true;
219
220 /* Externally visible functions must be output. The exception is
a0c938f0 221 COMDAT functions that must be output only when they are needed.
8baa9d15 222
223 When not optimizing, also output the static functions (see
95da6220 224 PR24561), but don't do so for always_inline functions, functions
d3d410e1 225 declared inline and nested functions. These were optimized out
226 in the original implementation and it is unclear whether we want
554f2707 227 to change the behavior here. */
bba7ddf8 228 if (((TREE_PUBLIC (decl)
d3d410e1 229 || (!optimize && !node->local.disregard_inline_limits
230 && !DECL_DECLARED_INLINE_P (decl)
231 && !node->origin))
bba7ddf8 232 && !flag_whole_program)
62eec3b4 233 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 234 return true;
235
236 /* Constructors and destructors are reachable from the runtime by
237 some mechanism. */
238 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
239 return true;
240
2c0b522d 241 if (flag_unit_at_a_time)
242 return false;
243
244 /* If not doing unit at a time, then we'll only defer this function
245 if it's marked for inlining. Otherwise we want to emit it now. */
246
247 /* "extern inline" functions are never output locally. */
248 if (DECL_EXTERNAL (decl))
249 return false;
4ee9c684 250 /* Nested functions of an extern inline function shall not be emitted unless
251 we inlined the origin. */
9d95b2b0 252 for (origin = decl_function_context (decl); origin;
253 origin = decl_function_context (origin))
254 if (DECL_EXTERNAL (origin))
4ee9c684 255 return false;
f024691d 256 /* We want to emit COMDAT functions only when absolutely necessary. */
c08871a9 257 if (DECL_COMDAT (decl))
2c0b522d 258 return false;
259 if (!DECL_INLINE (decl)
260 || (!node->local.disregard_inline_limits
261 /* When declared inline, defer even the uninlinable functions.
e4200070 262 This allows them to be eliminated when unused. */
a0c938f0 263 && !DECL_DECLARED_INLINE_P (decl)
b30512dd 264 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
2c0b522d 265 return true;
266
267 return false;
268}
269
06b27565 270/* Walk the decls we marked as necessary and see if they reference new
271 variables or functions and add them into the worklists. */
c1dcd13c 272static bool
273cgraph_varpool_analyze_pending_decls (void)
274{
275 bool changed = false;
276 timevar_push (TV_CGRAPH);
277
278 while (cgraph_varpool_first_unanalyzed_node)
279 {
280 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
281
282 cgraph_varpool_first_unanalyzed_node->analyzed = true;
283
284 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
285
e3a1de9d 286 /* Compute the alignment early so function body expanders are
287 already informed about increased alignment. */
288 align_variable (decl, 0);
289
c1dcd13c 290 if (DECL_INITIAL (decl))
9bfec7c2 291 {
292 visited_nodes = pointer_set_create ();
a0c938f0 293 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
9bfec7c2 294 pointer_set_destroy (visited_nodes);
295 visited_nodes = NULL;
296 }
c1dcd13c 297 changed = true;
298 }
299 timevar_pop (TV_CGRAPH);
300 return changed;
301}
302
303/* Optimization of function bodies might've rendered some variables
06b27565 304 unnecessary so we want to avoid compiling them.
c1dcd13c 305
442e3cb9 306 This is done by pruning the queue and keeping only the variables that
06b27565 307 really appear needed (i.e. they are either externally visible or referenced
c1dcd13c 308 by a compiled function). Re-doing the reachability analysis on variables
309 brings back the remaining variables referenced by these. */
310static void
311cgraph_varpool_remove_unreferenced_decls (void)
312{
313 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
314
315 cgraph_varpool_reset_queue ();
316
317 if (errorcount || sorrycount)
318 return;
319
320 while (node)
321 {
322 tree decl = node->decl;
323 next = node->next_needed;
324 node->needed = 0;
325
326 if (node->finalized
327 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
328 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
329 || node->force_output
c131e678 330 || decide_is_variable_needed (node, decl)
a0c938f0 331 /* ??? Cgraph does not yet rule the world with an iron hand,
c131e678 332 and does not control the emission of debug information.
333 After a variable has its DECL_RTL set, we must assume that
334 it may be referenced by the debug information, and we can
335 no longer elide it. */
336 || DECL_RTL_SET_P (decl)))
c1dcd13c 337 cgraph_varpool_mark_needed_node (node);
338
339 node = next;
340 }
279cd732 341 /* Make sure we mark alias targets as used targets. */
342 finish_aliases_1 ();
c1dcd13c 343 cgraph_varpool_analyze_pending_decls ();
344}
acc70efa 345
acc70efa 346
c08871a9 347/* When not doing unit-at-a-time, output all functions enqueued.
348 Return true when such functions were found. */
050e11c9 349
350bool
c08871a9 351cgraph_assemble_pending_functions (void)
352{
353 bool output = false;
354
355 if (flag_unit_at_a_time)
356 return false;
357
f788fff2 358 cgraph_output_pending_asms ();
359
c08871a9 360 while (cgraph_nodes_queue)
361 {
362 struct cgraph_node *n = cgraph_nodes_queue;
363
364 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 365 n->next_needed = NULL;
1d09f0e6 366 if (!n->global.inlined_to
367 && !n->alias
368 && !DECL_EXTERNAL (n->decl))
050e11c9 369 {
370 cgraph_expand_function (n);
371 output = true;
372 }
c08871a9 373 }
050e11c9 374
773c5ba7 375 /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
376 the expansion process. Note that this queue may grow as it is
377 being processed, as the new functions may generate new ones. */
378 while (cgraph_expand_queue)
379 {
380 struct cgraph_node *n = cgraph_expand_queue;
381 cgraph_expand_queue = cgraph_expand_queue->next_needed;
382 n->next_needed = NULL;
383 cgraph_finalize_function (n->decl, false);
384 output = true;
385 }
386
c08871a9 387 return output;
388}
773c5ba7 389
390
9b8fb23a 391/* As a GCC extension we allow redefinition of the function. The
392 semantics when the two bodies differ are not well defined.
393 We replace the old body with the new body so in unit-at-a-time mode
394 we always use the new body, while in normal mode we may end up with the
395 old body inlined into some functions and the new body expanded and
396 inlined in others.
397
398 ??? It may make more sense to use one body for inlining and the other
399 body for expanding the function but this is difficult to do. */
400
401static void
402cgraph_reset_node (struct cgraph_node *node)
403{
404 /* If node->output is set, then this is a unit-at-a-time compilation
405 and we have already begun whole-unit analysis. This is *not*
406 testing for whether we've already emitted the function. That
a0c938f0 407 case can be sort-of legitimately seen with real function
9b8fb23a 408 redefinition errors. I would argue that the front end should
409 never present us with such a case, but don't enforce that for now. */
410 gcc_assert (!node->output);
411
412 /* Reset our data structures so we can analyze the function again. */
413 memset (&node->local, 0, sizeof (node->local));
414 memset (&node->global, 0, sizeof (node->global));
415 memset (&node->rtl, 0, sizeof (node->rtl));
416 node->analyzed = false;
417 node->local.redefined_extern_inline = true;
418 node->local.finalized = false;
419
420 if (!flag_unit_at_a_time)
421 {
422 struct cgraph_node *n;
423
424 for (n = cgraph_nodes; n; n = n->next)
425 if (n->global.inlined_to == node)
426 cgraph_remove_node (n);
427 }
428
429 cgraph_node_remove_callees (node);
430
431 /* We may need to re-queue the node for assembling in case
432 we already processed it and ignored it as not needed. */
433 if (node->reachable && !flag_unit_at_a_time)
434 {
435 struct cgraph_node *n;
436
437 for (n = cgraph_nodes_queue; n; n = n->next_needed)
438 if (n == node)
439 break;
440 if (!n)
441 node->reachable = 0;
442 }
443}
c08871a9 444
1e8e9920 445static void
446cgraph_lower_function (struct cgraph_node *node)
447{
448 if (node->lowered)
449 return;
450 tree_lowering_passes (node->decl);
451 node->lowered = true;
452}
453
28df663b 454/* DECL has been parsed. Take it, queue it, compile it at the whim of the
455 logic in effect. If NESTED is true, then our caller cannot stand to have
456 the garbage collector run at the moment. We would need to either create
457 a new GC context, or just not compile right now. */
ae01b312 458
459void
28df663b 460cgraph_finalize_function (tree decl, bool nested)
ae01b312 461{
462 struct cgraph_node *node = cgraph_node (decl);
463
c08871a9 464 if (node->local.finalized)
9b8fb23a 465 cgraph_reset_node (node);
28df663b 466
c08871a9 467 notice_global_symbol (decl);
ae01b312 468 node->decl = decl;
79bb87b4 469 node->local.finalized = true;
e27482aa 470 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
9d95b2b0 471 if (node->nested)
472 lower_nested_functions (decl);
473 gcc_assert (!node->nested);
ae01b312 474
2c0b522d 475 /* If not unit at a time, then we need to create the call graph
476 now, so that called functions can be queued and emitted now. */
2ff66ee0 477 if (!flag_unit_at_a_time)
19489abd 478 {
479 cgraph_analyze_function (node);
9e0baf4d 480 cgraph_decide_inlining_incrementally (node, false);
19489abd 481 }
2ff66ee0 482
2c0b522d 483 if (decide_is_function_needed (node, decl))
484 cgraph_mark_needed_node (node);
485
ecda6e51 486 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 487 level unit, we need to be conservative about possible entry points
488 there. */
62eec3b4 489 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 490 cgraph_mark_reachable_node (node);
491
28df663b 492 /* If not unit at a time, go ahead and emit everything we've found
493 to be reachable at this time. */
494 if (!nested)
dc721f36 495 {
496 if (!cgraph_assemble_pending_functions ())
497 ggc_collect ();
498 }
3d7bfc56 499
2c0b522d 500 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 501 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 502 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 503
b69eb0ff 504 /* Possibly warn about unused parameters. */
505 if (warn_unused_parameter)
506 do_warn_unused_parameter (decl);
ae01b312 507}
508
ae01b312 509/* Walk tree and record all calls. Called via walk_tree. */
510static tree
9bfec7c2 511record_reference (tree *tp, int *walk_subtrees, void *data)
ae01b312 512{
ec1e35b2 513 tree t = *tp;
514
515 switch (TREE_CODE (t))
ae01b312 516 {
ec1e35b2 517 case VAR_DECL:
518 /* ??? Really, we should mark this decl as *potentially* referenced
519 by this function and re-examine whether the decl is actually used
520 after rtl has been generated. */
c1dcd13c 521 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
caa6fdce 522 {
523 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
524 if (lang_hooks.callgraph.analyze_expr)
a0c938f0 525 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
caa6fdce 526 data);
527 }
ec1e35b2 528 break;
529
4e4ac74b 530 case FDESC_EXPR:
ec1e35b2 531 case ADDR_EXPR:
532 if (flag_unit_at_a_time)
533 {
534 /* Record dereferences to the functions. This makes the
535 functions reachable unconditionally. */
536 tree decl = TREE_OPERAND (*tp, 0);
537 if (TREE_CODE (decl) == FUNCTION_DECL)
538 cgraph_mark_needed_node (cgraph_node (decl));
539 }
540 break;
541
ec1e35b2 542 default:
543 /* Save some cycles by not walking types and declarations as we
544 won't find anything useful there anyway. */
ce45a448 545 if (IS_TYPE_OR_DECL_P (*tp))
ae01b312 546 {
ae01b312 547 *walk_subtrees = 0;
ec1e35b2 548 break;
ae01b312 549 }
ec1e35b2 550
551 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
dc24ddbd 552 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
ec1e35b2 553 break;
ae01b312 554 }
ec1e35b2 555
ae01b312 556 return NULL;
557}
558
b0cdf642 559/* Create cgraph edges for function calls inside BODY from NODE. */
ae01b312 560
edc6a4c0 561static void
b0cdf642 562cgraph_create_edges (struct cgraph_node *node, tree body)
ae01b312 563{
9bfec7c2 564 basic_block bb;
565
566 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
567 block_stmt_iterator bsi;
568 tree step;
c6224531 569 visited_nodes = pointer_set_create ();
e27482aa 570
a0c938f0 571 /* Reach the trees by walking over the CFG, and note the
9bfec7c2 572 enclosing basic-blocks in the call edges. */
573 FOR_EACH_BB_FN (bb, this_cfun)
574 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
575 {
576 tree stmt = bsi_stmt (bsi);
577 tree call = get_call_expr_in (stmt);
578 tree decl;
579
580 if (call && (decl = get_callee_fndecl (call)))
e27482aa 581 {
9bfec7c2 582 cgraph_create_edge (node, cgraph_node (decl), stmt,
583 bb->count,
584 bb->loop_depth);
585 walk_tree (&TREE_OPERAND (call, 1),
586 record_reference, node, visited_nodes);
587 if (TREE_CODE (stmt) == MODIFY_EXPR)
588 walk_tree (&TREE_OPERAND (stmt, 0),
589 record_reference, node, visited_nodes);
e27482aa 590 }
a0c938f0 591 else
9bfec7c2 592 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
593 }
594
127d7f21 595 /* Look for initializers of constant variables and private statics. */
596 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
597 step;
598 step = TREE_CHAIN (step))
9bfec7c2 599 {
127d7f21 600 tree decl = TREE_VALUE (step);
601 if (TREE_CODE (decl) == VAR_DECL
602 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
603 && flag_unit_at_a_time)
604 cgraph_varpool_finalize_decl (decl);
605 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
606 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
e27482aa 607 }
a0c938f0 608
c6224531 609 pointer_set_destroy (visited_nodes);
25bb88de 610 visited_nodes = NULL;
ae01b312 611}
612
9e0baf4d 613/* Give initial reasons why inlining would fail. Those get
614 either NULLified or usually overwritten by a more precise reason
615 later. */
616static void
617initialize_inline_failed (struct cgraph_node *node)
618{
619 struct cgraph_edge *e;
620
621 for (e = node->callers; e; e = e->next_caller)
622 {
623 gcc_assert (!e->callee->global.inlined_to);
624 gcc_assert (e->inline_failed);
625 if (node->local.redefined_extern_inline)
626 e->inline_failed = N_("redefined extern inline functions are not "
627 "considered for inlining");
628 else if (!node->local.inlinable)
629 e->inline_failed = N_("function not inlinable");
630 else
631 e->inline_failed = N_("function not considered for inlining");
632 }
633}
634
635/* Rebuild call edges from the current function after passes not aware
636 of cgraph updating. */
2a1990e9 637static unsigned int
9e0baf4d 638rebuild_cgraph_edges (void)
639{
640 basic_block bb;
641 struct cgraph_node *node = cgraph_node (current_function_decl);
642 block_stmt_iterator bsi;
643
644 cgraph_node_remove_callees (node);
645
646 node->count = ENTRY_BLOCK_PTR->count;
647
648 FOR_EACH_BB (bb)
649 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
650 {
651 tree stmt = bsi_stmt (bsi);
652 tree call = get_call_expr_in (stmt);
653 tree decl;
654
655 if (call && (decl = get_callee_fndecl (call)))
656 cgraph_create_edge (node, cgraph_node (decl), stmt,
657 bb->count,
658 bb->loop_depth);
659 }
660 initialize_inline_failed (node);
661 gcc_assert (!node->global.inlined_to);
2a1990e9 662 return 0;
9e0baf4d 663}
664
665struct tree_opt_pass pass_rebuild_cgraph_edges =
666{
667 NULL, /* name */
668 NULL, /* gate */
669 rebuild_cgraph_edges, /* execute */
670 NULL, /* sub */
671 NULL, /* next */
672 0, /* static_pass_number */
673 0, /* tv_id */
674 PROP_cfg, /* properties_required */
675 0, /* properties_provided */
676 0, /* properties_destroyed */
677 0, /* todo_flags_start */
678 0, /* todo_flags_finish */
679 0 /* letter */
680};
b0cdf642 681
682/* Verify the cgraph node NODE. */
683void
684verify_cgraph_node (struct cgraph_node *node)
685{
686 struct cgraph_edge *e;
687 struct cgraph_node *main_clone;
e27482aa 688 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
689 basic_block this_block;
690 block_stmt_iterator bsi;
9bfec7c2 691 bool error_found = false;
b0cdf642 692
693 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 694 for (e = node->callees; e; e = e->next_callee)
695 if (e->aux)
696 {
0a81f5a0 697 error ("aux field set for edge %s->%s",
b0cdf642 698 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
699 error_found = true;
700 }
a2cb9b3b 701 if (node->count < 0)
702 {
703 error ("Execution count is negative");
704 error_found = true;
705 }
b0cdf642 706 for (e = node->callers; e; e = e->next_caller)
707 {
a2cb9b3b 708 if (e->count < 0)
709 {
710 error ("caller edge count is negative");
711 error_found = true;
712 }
b0cdf642 713 if (!e->inline_failed)
714 {
715 if (node->global.inlined_to
716 != (e->caller->global.inlined_to
717 ? e->caller->global.inlined_to : e->caller))
718 {
0a81f5a0 719 error ("inlined_to pointer is wrong");
b0cdf642 720 error_found = true;
721 }
722 if (node->callers->next_caller)
723 {
0a81f5a0 724 error ("multiple inline callers");
b0cdf642 725 error_found = true;
726 }
727 }
728 else
729 if (node->global.inlined_to)
730 {
0a81f5a0 731 error ("inlined_to pointer set for noninline callers");
b0cdf642 732 error_found = true;
733 }
734 }
735 if (!node->callers && node->global.inlined_to)
736 {
5cd75817 737 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 738 error_found = true;
739 }
740 if (node->global.inlined_to == node)
741 {
0a81f5a0 742 error ("inlined_to pointer refers to itself");
b0cdf642 743 error_found = true;
744 }
745
746 for (main_clone = cgraph_node (node->decl); main_clone;
747 main_clone = main_clone->next_clone)
748 if (main_clone == node)
749 break;
0f6439b9 750 if (!cgraph_node (node->decl))
b0cdf642 751 {
0f6439b9 752 error ("node not found in cgraph_hash");
b0cdf642 753 error_found = true;
754 }
a0c938f0 755
b0cdf642 756 if (node->analyzed
757 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
758 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
759 {
e27482aa 760 if (this_cfun->cfg)
761 {
762 /* The nodes we're interested in are never shared, so walk
763 the tree ignoring duplicates. */
764 visited_nodes = pointer_set_create ();
765 /* Reach the trees by walking over the CFG, and note the
766 enclosing basic-blocks in the call edges. */
767 FOR_EACH_BB_FN (this_block, this_cfun)
768 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
9bfec7c2 769 {
770 tree stmt = bsi_stmt (bsi);
771 tree call = get_call_expr_in (stmt);
772 tree decl;
773 if (call && (decl = get_callee_fndecl (call)))
774 {
775 struct cgraph_edge *e = cgraph_edge (node, stmt);
776 if (e)
777 {
778 if (e->aux)
779 {
0a81f5a0 780 error ("shared call_stmt:");
9bfec7c2 781 debug_generic_stmt (stmt);
782 error_found = true;
783 }
469679ab 784 if (e->callee->decl != cgraph_node (decl)->decl
785 && e->inline_failed)
9bfec7c2 786 {
0a81f5a0 787 error ("edge points to wrong declaration:");
9bfec7c2 788 debug_tree (e->callee->decl);
789 fprintf (stderr," Instead of:");
790 debug_tree (decl);
791 }
792 e->aux = (void *)1;
793 }
794 else
795 {
0a81f5a0 796 error ("missing callgraph edge for call stmt:");
9bfec7c2 797 debug_generic_stmt (stmt);
798 error_found = true;
799 }
800 }
801 }
e27482aa 802 pointer_set_destroy (visited_nodes);
803 visited_nodes = NULL;
804 }
805 else
806 /* No CFG available?! */
807 gcc_unreachable ();
808
b0cdf642 809 for (e = node->callees; e; e = e->next_callee)
810 {
811 if (!e->aux)
812 {
0a81f5a0 813 error ("edge %s->%s has no corresponding call_stmt",
b0cdf642 814 cgraph_node_name (e->caller),
815 cgraph_node_name (e->callee));
9bfec7c2 816 debug_generic_stmt (e->call_stmt);
b0cdf642 817 error_found = true;
818 }
819 e->aux = 0;
820 }
821 }
822 if (error_found)
823 {
824 dump_cgraph_node (stderr, node);
0a81f5a0 825 internal_error ("verify_cgraph_node failed");
b0cdf642 826 }
827 timevar_pop (TV_CGRAPH_VERIFY);
828}
829
830/* Verify whole cgraph structure. */
831void
832verify_cgraph (void)
833{
834 struct cgraph_node *node;
835
8ec2a798 836 if (sorrycount || errorcount)
837 return;
838
b0cdf642 839 for (node = cgraph_nodes; node; node = node->next)
840 verify_cgraph_node (node);
841}
842
56af936e 843/* Output one variable, if necessary. Return whether we output it. */
844static bool
845cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
846{
847 tree decl = node->decl;
848
849 if (!TREE_ASM_WRITTEN (decl)
850 && !node->alias
851 && !DECL_EXTERNAL (decl)
852 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
853 {
854 assemble_variable (decl, 0, 1, 0);
855 /* Local static variables are never seen by check_global_declarations
856 so we need to output debug info by hand. */
a0c938f0 857 if (DECL_CONTEXT (decl)
56af936e 858 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
859 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
860 && errorcount == 0 && sorrycount == 0)
861 {
862 timevar_push (TV_SYMOUT);
863 (*debug_hooks->global_decl) (decl);
864 timevar_pop (TV_SYMOUT);
865 }
866 return true;
867 }
868
869 return false;
870}
c1dcd13c 871
872/* Output all variables enqueued to be assembled. */
873bool
874cgraph_varpool_assemble_pending_decls (void)
875{
876 bool changed = false;
877
878 if (errorcount || sorrycount)
879 return false;
a0c938f0 880
c1dcd13c 881 /* EH might mark decls as needed during expansion. This should be safe since
882 we don't create references to new functions, but it should not be used
883 elsewhere. */
884 cgraph_varpool_analyze_pending_decls ();
885
886 while (cgraph_varpool_nodes_queue)
887 {
c1dcd13c 888 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
889
890 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
56af936e 891 if (cgraph_varpool_assemble_decl (node))
892 changed = true;
c1dcd13c 893 node->next_needed = NULL;
894 }
895 return changed;
896}
897
56af936e 898/* Output all asm statements we have stored up to be output. */
899
900static void
901cgraph_output_pending_asms (void)
902{
903 struct cgraph_asm_node *can;
904
905 if (errorcount || sorrycount)
906 return;
907
908 for (can = cgraph_asm_nodes; can; can = can->next)
909 assemble_asm (can->asm_str);
910 cgraph_asm_nodes = NULL;
911}
912
0785e435 913/* Analyze the function scheduled to be output. */
1e8e9920 914void
0785e435 915cgraph_analyze_function (struct cgraph_node *node)
916{
917 tree decl = node->decl;
918
ec1e35b2 919 current_function_decl = decl;
e27482aa 920 push_cfun (DECL_STRUCT_FUNCTION (decl));
921 cgraph_lower_function (node);
0785e435 922
923 /* First kill forward declaration so reverse inlining works properly. */
e27482aa 924 cgraph_create_edges (node, decl);
0785e435 925
926 node->local.inlinable = tree_inlinable_function_p (decl);
e27482aa 927 node->local.self_insns = estimate_num_insns (decl);
0785e435 928 if (node->local.inlinable)
929 node->local.disregard_inline_limits
dc24ddbd 930 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
9e0baf4d 931 initialize_inline_failed (node);
99cf25d0 932 if (flag_really_no_inline && !node->local.disregard_inline_limits)
933 node->local.inlinable = 0;
0785e435 934 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
935 node->global.insns = node->local.self_insns;
0785e435 936
ec1e35b2 937 node->analyzed = true;
e27482aa 938 pop_cfun ();
c08871a9 939 current_function_decl = NULL;
0785e435 940}
941
05806473 942/* Look for externally_visible and used attributes and mark cgraph nodes
943 accordingly.
944
945 We cannot mark the nodes at the point the attributes are processed (in
946 handle_*_attribute) because the copy of the declarations available at that
947 point may not be canonical. For example, in:
948
949 void f();
950 void f() __attribute__((used));
951
952 the declaration we see in handle_used_attribute will be the second
953 declaration -- but the front end will subsequently merge that declaration
954 with the original declaration and discard the second declaration.
955
956 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
957
958 void f() {}
959 void f() __attribute__((externally_visible));
960
961 is valid.
962
963 So, we walk the nodes at the end of the translation unit, applying the
964 attributes at that point. */
965
966static void
967process_function_and_variable_attributes (struct cgraph_node *first,
968 struct cgraph_varpool_node *first_var)
969{
970 struct cgraph_node *node;
971 struct cgraph_varpool_node *vnode;
972
973 for (node = cgraph_nodes; node != first; node = node->next)
974 {
975 tree decl = node->decl;
976 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
977 {
978 mark_decl_referenced (decl);
979 if (node->local.finalized)
980 cgraph_mark_needed_node (node);
981 }
982 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
983 {
984 if (node->local.finalized)
985 cgraph_mark_needed_node (node);
986 node->externally_visible = true;
987 }
988 }
989 for (vnode = cgraph_varpool_nodes; vnode != first_var; vnode = vnode->next)
990 {
991 tree decl = vnode->decl;
992 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
993 {
994 mark_decl_referenced (decl);
995 if (vnode->finalized)
996 cgraph_varpool_mark_needed_node (vnode);
997 }
998 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
999 {
1000 if (vnode->finalized)
1001 cgraph_varpool_mark_needed_node (vnode);
1002 vnode->externally_visible = true;
1003 }
1004 }
1005}
1006
ae01b312 1007/* Analyze the whole compilation unit once it is parsed completely. */
1008
1009void
d9d9733a 1010cgraph_finalize_compilation_unit (void)
ae01b312 1011{
1012 struct cgraph_node *node;
c1dcd13c 1013 /* Keep track of already processed nodes when called multiple times for
06b27565 1014 intermodule optimization. */
c1dcd13c 1015 static struct cgraph_node *first_analyzed;
05806473 1016 static struct cgraph_varpool_node *first_analyzed_var;
ae01b312 1017
d7401838 1018 finish_aliases_1 ();
1019
2ff66ee0 1020 if (!flag_unit_at_a_time)
c08871a9 1021 {
56af936e 1022 cgraph_output_pending_asms ();
c08871a9 1023 cgraph_assemble_pending_functions ();
1024 return;
1025 }
2ff66ee0 1026
d7c6d889 1027 if (!quiet_flag)
c1dcd13c 1028 {
1029 fprintf (stderr, "\nAnalyzing compilation unit");
1030 fflush (stderr);
1031 }
229dcfae 1032
f79b6507 1033 timevar_push (TV_CGRAPH);
05806473 1034 process_function_and_variable_attributes (first_analyzed, first_analyzed_var);
c1dcd13c 1035 cgraph_varpool_analyze_pending_decls ();
f79b6507 1036 if (cgraph_dump_file)
ae01b312 1037 {
e4200070 1038 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1039 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
3d7bfc56 1040 if (node->needed && DECL_SAVED_TREE (node->decl))
f79b6507 1041 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1042 fprintf (cgraph_dump_file, "\n");
ae01b312 1043 }
1044
e6d2b2d8 1045 /* Propagate reachability flag and lower representation of all reachable
1046 functions. In the future, lowering will introduce new functions and
1047 new entry points on the way (by template instantiation and virtual
1048 method table generation for instance). */
3d7bfc56 1049 while (cgraph_nodes_queue)
ae01b312 1050 {
0785e435 1051 struct cgraph_edge *edge;
3d7bfc56 1052 tree decl = cgraph_nodes_queue->decl;
1053
1054 node = cgraph_nodes_queue;
d87976fb 1055 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1056 node->next_needed = NULL;
ae01b312 1057
638531ad 1058 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 1059 weak alias attribute to kill its body. See
638531ad 1060 gcc.c-torture/compile/20011119-1.c. */
1061 if (!DECL_SAVED_TREE (decl))
9b8fb23a 1062 {
1063 cgraph_reset_node (node);
1064 continue;
1065 }
638531ad 1066
cc636d56 1067 gcc_assert (!node->analyzed && node->reachable);
1068 gcc_assert (DECL_SAVED_TREE (decl));
ae01b312 1069
0785e435 1070 cgraph_analyze_function (node);
2c0b522d 1071
ae01b312 1072 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1073 if (!edge->callee->reachable)
2c0b522d 1074 cgraph_mark_reachable_node (edge->callee);
1075
c1dcd13c 1076 cgraph_varpool_analyze_pending_decls ();
ae01b312 1077 }
2c0b522d 1078
3d7bfc56 1079 /* Collect entry points to the unit. */
1080
f79b6507 1081 if (cgraph_dump_file)
3d7bfc56 1082 {
e4200070 1083 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1084 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
3d7bfc56 1085 if (node->needed && DECL_SAVED_TREE (node->decl))
f79b6507 1086 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1087 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1088 dump_cgraph (cgraph_dump_file);
3d7bfc56 1089 }
e6d2b2d8 1090
f79b6507 1091 if (cgraph_dump_file)
1092 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1093
c1dcd13c 1094 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
ae01b312 1095 {
1096 tree decl = node->decl;
1097
9b8fb23a 1098 if (node->local.finalized && !DECL_SAVED_TREE (decl))
a0c938f0 1099 cgraph_reset_node (node);
9b8fb23a 1100
ae01b312 1101 if (!node->reachable && DECL_SAVED_TREE (decl))
1102 {
f79b6507 1103 if (cgraph_dump_file)
1104 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1105 cgraph_remove_node (node);
9b8fb23a 1106 continue;
ae01b312 1107 }
bc5cab3b 1108 else
1109 node->next_needed = NULL;
9b8fb23a 1110 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1111 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1112 }
f79b6507 1113 if (cgraph_dump_file)
e4200070 1114 {
1115 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1116 dump_cgraph (cgraph_dump_file);
1117 }
c1dcd13c 1118 first_analyzed = cgraph_nodes;
05806473 1119 first_analyzed_var = cgraph_varpool_nodes;
ae01b312 1120 ggc_collect ();
f79b6507 1121 timevar_pop (TV_CGRAPH);
ae01b312 1122}
ae01b312 1123/* Figure out what functions we want to assemble. */
1124
1125static void
d9d9733a 1126cgraph_mark_functions_to_output (void)
ae01b312 1127{
1128 struct cgraph_node *node;
1129
ae01b312 1130 for (node = cgraph_nodes; node; node = node->next)
1131 {
1132 tree decl = node->decl;
d7c6d889 1133 struct cgraph_edge *e;
a0c938f0 1134
cc636d56 1135 gcc_assert (!node->output);
d7c6d889 1136
1137 for (e = node->callers; e; e = e->next_caller)
611e5405 1138 if (e->inline_failed)
d7c6d889 1139 break;
ae01b312 1140
e6d2b2d8 1141 /* We need to output all local functions that are used and not
1142 always inlined, as well as those that are reachable from
1143 outside the current compilation unit. */
ae01b312 1144 if (DECL_SAVED_TREE (decl)
b0cdf642 1145 && !node->global.inlined_to
ae01b312 1146 && (node->needed
d7c6d889 1147 || (e && node->reachable))
4ee9c684 1148 && !TREE_ASM_WRITTEN (decl)
ae01b312 1149 && !DECL_EXTERNAL (decl))
1150 node->output = 1;
cc636d56 1151 else
9cee7c3f 1152 {
1153 /* We should've reclaimed all functions that are not needed. */
1154#ifdef ENABLE_CHECKING
1155 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1156 && !DECL_EXTERNAL (decl))
1157 {
1158 dump_cgraph_node (stderr, node);
1159 internal_error ("failed to reclaim unneeded function");
1160 }
1161#endif
1162 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1163 || DECL_EXTERNAL (decl));
1164
1165 }
a0c938f0 1166
961e3b13 1167 }
1168}
1169
ae01b312 1170/* Expand function specified by NODE. */
e6d2b2d8 1171
ae01b312 1172static void
d9d9733a 1173cgraph_expand_function (struct cgraph_node *node)
ae01b312 1174{
1175 tree decl = node->decl;
1176
b0cdf642 1177 /* We ought to not compile any inline clones. */
cc636d56 1178 gcc_assert (!node->global.inlined_to);
b0cdf642 1179
28df663b 1180 if (flag_unit_at_a_time)
1181 announce_function (decl);
961e3b13 1182
f8deefc1 1183 cgraph_lower_function (node);
1184
794da2bb 1185 /* Generate RTL for the body of DECL. */
dc24ddbd 1186 lang_hooks.callgraph.expand_function (decl);
961e3b13 1187
4ee9c684 1188 /* Make sure that the back end didn't give up on compiling. */
1189 /* ??? Can happen with nested functions of extern inline functions. */
cc636d56 1190 gcc_assert (TREE_ASM_WRITTEN (node->decl));
b0cdf642 1191
ae01b312 1192 current_function_decl = NULL;
8ec2a798 1193 if (!cgraph_preserve_function_body_p (node->decl))
4ee9c684 1194 {
1195 DECL_SAVED_TREE (node->decl) = NULL;
1196 DECL_STRUCT_FUNCTION (node->decl) = NULL;
4ee9c684 1197 DECL_INITIAL (node->decl) = error_mark_node;
7edd21a5 1198 /* Eliminate all call edges. This is important so the call_expr no longer
8ec2a798 1199 points to the dead function body. */
bb4c7a44 1200 cgraph_node_remove_callees (node);
4ee9c684 1201 }
e1be32b8 1202
1203 cgraph_function_flags_ready = true;
ae01b312 1204}
1205
b0cdf642 1206/* Return true when the call in edge E was inlined; otherwise set *REASON to why it was not. */
d7c6d889 1207
1208bool
b0cdf642 1209cgraph_inline_p (struct cgraph_edge *e, const char **reason)
d7c6d889 1210{
b0cdf642 1211 *reason = e->inline_failed;
1212 return !e->inline_failed;
d7c6d889 1213}
b0cdf642 1214
acc70efa 1215
acc70efa 1216
d9d9733a 1217/* Expand all functions that must be output.
1218
d7c6d889 1219 Attempt to topologically sort the nodes so a function is output when
1220 all called functions are already assembled to allow data to be
91c82c20 1221 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1222 between a function and its callees (later we may choose to use a more
d7c6d889 1223 sophisticated algorithm for function reordering; we will likely want
1224 to use subsections to make the output functions appear in top-down
1225 order). */
1226
1227static void
a6868229 1228cgraph_expand_all_functions (void)
d7c6d889 1229{
1230 struct cgraph_node *node;
4c36ffe6 1231 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
b0cdf642 1232 int order_pos = 0, new_order_pos = 0;
d7c6d889 1233 int i;
1234
d7c6d889 1235 order_pos = cgraph_postorder (order);
cc636d56 1236 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1237
7bd28bba 1238 /* The garbage collector may remove the inline clones we eliminate during
b0cdf642 1239 optimization, so we must be sure not to reference them. */
1240 for (i = 0; i < order_pos; i++)
1241 if (order[i]->output)
1242 order[new_order_pos++] = order[i];
1243
1244 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1245 {
1246 node = order[i];
1247 if (node->output)
1248 {
cc636d56 1249 gcc_assert (node->reachable);
d7c6d889 1250 node->output = 0;
1251 cgraph_expand_function (node);
1252 }
1253 }
773c5ba7 1254
d7c6d889 1255 free (order);
773c5ba7 1256
1257 /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
1258 the expansion process. Note that this queue may grow as its
1259 being processed, as the new functions may generate new ones. */
1260 while (cgraph_expand_queue)
1261 {
1262 node = cgraph_expand_queue;
1263 cgraph_expand_queue = cgraph_expand_queue->next_needed;
1264 node->next_needed = NULL;
1265 node->output = 0;
1266 node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1267 cgraph_expand_function (node);
1268 }
d7c6d889 1269}
1270
56af936e 1271/* This is used to sort the node types by the cgraph order number. */
1272
1273struct cgraph_order_sort
1274{
1275 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1276 union
1277 {
1278 struct cgraph_node *f;
1279 struct cgraph_varpool_node *v;
1280 struct cgraph_asm_node *a;
1281 } u;
1282};
1283
1284/* Output all functions, variables, and asm statements in the order
1285 according to their order fields, which is the order in which they
1286 appeared in the file. This implements -fno-toplevel-reorder. In
1287 this mode we may output functions and variables which don't really
1288 need to be output. */
1289
1290static void
1291cgraph_output_in_order (void)
1292{
1293 int max;
1294 size_t size;
1295 struct cgraph_order_sort *nodes;
1296 int i;
1297 struct cgraph_node *pf;
1298 struct cgraph_varpool_node *pv;
1299 struct cgraph_asm_node *pa;
1300
1301 max = cgraph_order;
1302 size = max * sizeof (struct cgraph_order_sort);
1303 nodes = (struct cgraph_order_sort *) alloca (size);
1304 memset (nodes, 0, size);
1305
1306 cgraph_varpool_analyze_pending_decls ();
1307
1308 for (pf = cgraph_nodes; pf; pf = pf->next)
1309 {
1310 if (pf->output)
1311 {
1312 i = pf->order;
1313 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1314 nodes[i].kind = ORDER_FUNCTION;
1315 nodes[i].u.f = pf;
1316 }
1317 }
1318
1319 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1320 {
1321 i = pv->order;
1322 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1323 nodes[i].kind = ORDER_VAR;
1324 nodes[i].u.v = pv;
1325 }
1326
1327 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1328 {
1329 i = pa->order;
1330 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1331 nodes[i].kind = ORDER_ASM;
1332 nodes[i].u.a = pa;
1333 }
56af936e 1334
1335 for (i = 0; i < max; ++i)
1336 {
1337 switch (nodes[i].kind)
1338 {
1339 case ORDER_FUNCTION:
1340 nodes[i].u.f->output = 0;
1341 cgraph_expand_function (nodes[i].u.f);
1342 break;
1343
1344 case ORDER_VAR:
1345 cgraph_varpool_assemble_decl (nodes[i].u.v);
1346 break;
1347
1348 case ORDER_ASM:
1349 assemble_asm (nodes[i].u.a->asm_str);
1350 break;
1351
1352 case ORDER_UNDEFINED:
1353 break;
1354
1355 default:
1356 gcc_unreachable ();
1357 }
1358 }
4b4ea2db 1359
1360 cgraph_asm_nodes = NULL;
56af936e 1361}
1362
3f82b628 1363/* Mark visibility of all functions.
a0c938f0 1364
acc70efa 1365 A local function is one whose calls can occur only in the current
1366 compilation unit and all its calls are explicit, so we can change
1367 its calling convention. We simply mark all static functions whose
3f82b628 1368 address is not taken as local.
1369
1370 We also change the TREE_PUBLIC flag of all declarations that are public
1371 from the language point of view but whose default we want to override
1372 via visibilities for the back end's point of view. */
d7c6d889 1373
1374static void
3f82b628 1375cgraph_function_and_variable_visibility (void)
d7c6d889 1376{
1377 struct cgraph_node *node;
3f82b628 1378 struct cgraph_varpool_node *vnode;
d7c6d889 1379
d7c6d889 1380 for (node = cgraph_nodes; node; node = node->next)
1381 {
3f82b628 1382 if (node->reachable
1383 && (DECL_COMDAT (node->decl)
c8b8ac00 1384 || (!flag_whole_program
1385 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
62eec3b4 1386 node->local.externally_visible = true;
1387 if (!node->local.externally_visible && node->analyzed
1388 && !DECL_EXTERNAL (node->decl))
1389 {
1390 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1391 TREE_PUBLIC (node->decl) = 0;
1392 }
d7c6d889 1393 node->local.local = (!node->needed
3f82b628 1394 && node->analyzed
e6ad3175 1395 && !DECL_EXTERNAL (node->decl)
8b4f617c 1396 && !node->local.externally_visible);
3f82b628 1397 }
1398 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1399 {
1400 if (vnode->needed
c8b8ac00 1401 && !flag_whole_program
3f82b628 1402 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1403 vnode->externally_visible = 1;
62eec3b4 1404 if (!vnode->externally_visible)
1405 {
1406 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1407 TREE_PUBLIC (vnode->decl) = 0;
1408 }
3f82b628 1409 gcc_assert (TREE_STATIC (vnode->decl));
d7c6d889 1410 }
acc70efa 1411
3f82b628 1412 /* Because we have to be conservative on the boundaries of source
1413 level units, it is possible that we marked some functions as
1414 reachable just because they might be used later via external
1415 linkage, but after making them local they are really unreachable
1416 now. */
1417 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1418
d7c6d889 1419 if (cgraph_dump_file)
acc70efa 1420 {
1421 fprintf (cgraph_dump_file, "\nMarking local functions:");
1422 for (node = cgraph_nodes; node; node = node->next)
1423 if (node->local.local)
1424 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1425 fprintf (cgraph_dump_file, "\n\n");
3f82b628 1426 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1427 for (node = cgraph_nodes; node; node = node->next)
1428 if (node->local.externally_visible)
1429 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1430 fprintf (cgraph_dump_file, "\n\n");
58f1b650 1431 }
3f82b628 1432 cgraph_function_flags_ready = true;
acc70efa 1433}
80a85d8a 1434
b0cdf642 1435/* Return true when function body of DECL still needs to be kept around
1436 for later re-use. */
1437bool
1438cgraph_preserve_function_body_p (tree decl)
1439{
1440 struct cgraph_node *node;
b0cdf642 1441 if (!cgraph_global_info_ready)
1442 return (DECL_INLINE (decl) && !flag_really_no_inline);
1443 /* Look if there is any clone around. */
1444 for (node = cgraph_node (decl); node; node = node->next_clone)
1445 if (node->global.inlined_to)
1446 return true;
1447 return false;
1448}
1449
77fce4cd 1450static void
1451ipa_passes (void)
1452{
1453 cfun = NULL;
1454 tree_register_cfg_hooks ();
1455 bitmap_obstack_initialize (NULL);
1456 execute_ipa_pass_list (all_ipa_passes);
1457 bitmap_obstack_release (NULL);
1458}
1459
ae01b312 1460/* Perform simple optimizations based on callgraph. */
1461
1462void
d9d9733a 1463cgraph_optimize (void)
ae01b312 1464{
b0cdf642 1465#ifdef ENABLE_CHECKING
1466 verify_cgraph ();
1467#endif
2ff66ee0 1468 if (!flag_unit_at_a_time)
c1dcd13c 1469 {
56af936e 1470 cgraph_output_pending_asms ();
c1dcd13c 1471 cgraph_varpool_assemble_pending_decls ();
1472 return;
1473 }
e9f08e82 1474
1475 process_pending_assemble_externals ();
a0c938f0 1476
c1dcd13c 1477 /* The front end may output common variables after the unit has been finalized.
1478 It is safe to deal with them here as they are always zero-initialized. */
1479 cgraph_varpool_analyze_pending_decls ();
e9f08e82 1480
f79b6507 1481 timevar_push (TV_CGRAPHOPT);
d7c6d889 1482 if (!quiet_flag)
1483 fprintf (stderr, "Performing intraprocedural optimizations\n");
e4200070 1484
3f82b628 1485 cgraph_function_and_variable_visibility ();
f79b6507 1486 if (cgraph_dump_file)
1487 {
e4200070 1488 fprintf (cgraph_dump_file, "Marked ");
f79b6507 1489 dump_cgraph (cgraph_dump_file);
1490 }
be4d0974 1491
1492 /* Don't run the IPA passes if there was any error or sorry messages. */
1493 if (errorcount == 0 && sorrycount == 0)
1494 ipa_passes ();
1495
e1be32b8 1496 /* This pass removes bodies of extern inline functions we never inlined.
1497 Do this later so other IPA passes see what is really going on. */
1498 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1499 cgraph_global_info_ready = true;
f79b6507 1500 if (cgraph_dump_file)
1501 {
e4200070 1502 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1503 dump_cgraph (cgraph_dump_file);
c1dcd13c 1504 dump_varpool (cgraph_dump_file);
f79b6507 1505 }
1506 timevar_pop (TV_CGRAPHOPT);
ae01b312 1507
d7c6d889 1508 /* Output everything. */
e4200070 1509 if (!quiet_flag)
1510 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1511#ifdef ENABLE_CHECKING
1512 verify_cgraph ();
1513#endif
56af936e 1514
acc70efa 1515 cgraph_mark_functions_to_output ();
c1dcd13c 1516
56af936e 1517 if (!flag_toplevel_reorder)
1518 cgraph_output_in_order ();
1519 else
1520 {
1521 cgraph_output_pending_asms ();
1522
1523 cgraph_expand_all_functions ();
1524 cgraph_varpool_remove_unreferenced_decls ();
1525
1526 cgraph_varpool_assemble_pending_decls ();
1527 }
c1dcd13c 1528
f79b6507 1529 if (cgraph_dump_file)
1530 {
e4200070 1531 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1532 dump_cgraph (cgraph_dump_file);
1533 }
b0cdf642 1534#ifdef ENABLE_CHECKING
1535 verify_cgraph ();
4ee9c684 1536 /* Double check that all inline clones are gone and that all
1537 function bodies have been released from memory. */
1538 if (flag_unit_at_a_time
0f9005dd 1539 && !dump_enabled_p (TDI_tree_all)
4ee9c684 1540 && !(sorrycount || errorcount))
1541 {
1542 struct cgraph_node *node;
1543 bool error_found = false;
1544
1545 for (node = cgraph_nodes; node; node = node->next)
1546 if (node->analyzed
1547 && (node->global.inlined_to
a0c938f0 1548 || DECL_SAVED_TREE (node->decl)))
4ee9c684 1549 {
1550 error_found = true;
1551 dump_cgraph_node (stderr, node);
a0c938f0 1552 }
4ee9c684 1553 if (error_found)
0a81f5a0 1554 internal_error ("nodes with no released memory found");
4ee9c684 1555 }
b0cdf642 1556#endif
ae01b312 1557}
b5530559 1558
1559/* Generate and emit a static constructor or destructor. WHICH must be
a0c938f0 1560 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
b5530559 1561 GENERIC statements. */
1562
1563void
c5344b58 1564cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 1565{
1566 static int counter = 0;
1567 char which_buf[16];
540edea7 1568 tree decl, name, resdecl;
b5530559 1569
1570 sprintf (which_buf, "%c_%d", which, counter++);
1571 name = get_file_function_name_long (which_buf);
1572
1573 decl = build_decl (FUNCTION_DECL, name,
1574 build_function_type (void_type_node, void_list_node));
1575 current_function_decl = decl;
1576
540edea7 1577 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1578 DECL_ARTIFICIAL (resdecl) = 1;
1579 DECL_IGNORED_P (resdecl) = 1;
1580 DECL_RESULT (decl) = resdecl;
1581
b5530559 1582 allocate_struct_function (decl);
1583
1584 TREE_STATIC (decl) = 1;
1585 TREE_USED (decl) = 1;
1586 DECL_ARTIFICIAL (decl) = 1;
1587 DECL_IGNORED_P (decl) = 1;
1588 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1589 DECL_SAVED_TREE (decl) = body;
1590 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1591 DECL_UNINLINABLE (decl) = 1;
1592
1593 DECL_INITIAL (decl) = make_node (BLOCK);
1594 TREE_USED (DECL_INITIAL (decl)) = 1;
1595
1596 DECL_SOURCE_LOCATION (decl) = input_location;
1597 cfun->function_end_locus = input_location;
1598
cc636d56 1599 switch (which)
1600 {
1601 case 'I':
1602 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1603 break;
1604 case 'D':
1605 DECL_STATIC_DESTRUCTOR (decl) = 1;
1606 break;
1607 default:
1608 gcc_unreachable ();
1609 }
b5530559 1610
1611 gimplify_function_tree (decl);
1612
1613 /* ??? We will get called LATE in the compilation process. */
1614 if (cgraph_global_info_ready)
e27482aa 1615 {
1616 tree_lowering_passes (decl);
1617 tree_rest_of_compilation (decl);
1618 }
b5530559 1619 else
1620 cgraph_finalize_function (decl, 0);
a0c938f0 1621
b5530559 1622 if (targetm.have_ctors_dtors)
1623 {
1624 void (*fn) (rtx, int);
1625
1626 if (which == 'I')
1627 fn = targetm.asm_out.constructor;
1628 else
1629 fn = targetm.asm_out.destructor;
c5344b58 1630 fn (XEXP (DECL_RTL (decl), 0), priority);
b5530559 1631 }
1632}
121f3051 1633
1634void
1635init_cgraph (void)
1636{
1637 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1638}
b5d36404 1639
a0c938f0 1640/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1641 fixed by cgraph_function_versioning (); now the call_expr in their
1642 respective tree code should be updated to call the NEW_VERSION. */
1643
1644static void
1645update_call_expr (struct cgraph_node *new_version)
1646{
1647 struct cgraph_edge *e;
1648
1649 gcc_assert (new_version);
1650 for (e = new_version->callers; e; e = e->next_caller)
1651 /* Update the call expr on the edges
1652 to call the new version. */
1653 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1654}
1655
1656
1657/* Create a new cgraph node which is the new version of
1658 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1659 edges which should be redirected to point to
1660 NEW_VERSION. All the callee edges of OLD_VERSION
1661 are cloned to the new version node. Return the new
1662 version node. */
1663
1664static struct cgraph_node *
1665cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1666 tree new_decl,
1667 VEC(cgraph_edge_p,heap) *redirect_callers)
b5d36404 1668 {
1669 struct cgraph_node *new_version;
1670 struct cgraph_edge *e, *new_e;
1671 struct cgraph_edge *next_callee;
1672 unsigned i;
1673
1674 gcc_assert (old_version);
a0c938f0 1675
b5d36404 1676 new_version = cgraph_node (new_decl);
1677
1678 new_version->analyzed = true;
1679 new_version->local = old_version->local;
1680 new_version->global = old_version->global;
1681 new_version->rtl = new_version->rtl;
1682 new_version->reachable = true;
1683 new_version->count = old_version->count;
1684
1685 /* Clone the old node callees. Recursive calls are
1686 also cloned. */
1687 for (e = old_version->callees;e; e=e->next_callee)
1688 {
1689 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1690 new_e->count = e->count;
1691 }
1692 /* Fix recursive calls.
1693 If OLD_VERSION has a recursive call after the
1694 previous edge cloning, the new version will have an edge
1695 pointing to the old version, which is wrong;
1696 Redirect it to point to the new version. */
1697 for (e = new_version->callees ; e; e = next_callee)
1698 {
1699 next_callee = e->next_callee;
1700 if (e->callee == old_version)
1701 cgraph_redirect_edge_callee (e, new_version);
a0c938f0 1702
b5d36404 1703 if (!next_callee)
1704 break;
1705 }
4460a647 1706 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1707 {
1708 /* Redirect calls to the old version node to point to its new
1709 version. */
1710 cgraph_redirect_edge_callee (e, new_version);
1711 }
b5d36404 1712
1713 return new_version;
1714 }
1715
1716 /* Perform function versioning.
a0c938f0 1717 Function versioning includes copying of the tree and
b5d36404 1718 a callgraph update (creating a new cgraph node and updating
1719 its callees and callers).
1720
1721 REDIRECT_CALLERS varray includes the edges to be redirected
1722 to the new version.
1723
1724 TREE_MAP is a mapping of tree nodes we want to replace with
1725 new ones (according to results of prior analysis).
1726 OLD_VERSION_NODE is the node that is versioned.
1727 It returns the new version's cgraph node. */
1728
1729struct cgraph_node *
1730cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 1731 VEC(cgraph_edge_p,heap) *redirect_callers,
b5d36404 1732 varray_type tree_map)
1733{
1734 tree old_decl = old_version_node->decl;
1735 struct cgraph_node *new_version_node = NULL;
1736 tree new_decl;
1737
1738 if (!tree_versionable_function_p (old_decl))
1739 return NULL;
1740
1741 /* Make a new FUNCTION_DECL tree node for the
1742 new version. */
1743 new_decl = copy_node (old_decl);
1744
1745 /* Create the new version's call-graph node.
1746 and update the edges of the new node. */
1747 new_version_node =
1748 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1749 redirect_callers);
1750
1751 /* Copy the OLD_VERSION_NODE function tree to the new version. */
469679ab 1752 tree_function_versioning (old_decl, new_decl, tree_map, false);
b5d36404 1753 /* Update the call_expr on the edges to call the new version node. */
1754 update_call_expr (new_version_node);
1755
a0c938f0 1756 /* Update the new version's properties.
b5d36404 1757 Make the new version visible only within this translation unit.
a0c938f0 1758 ??? We cannot use COMDAT linkage because there is no
b5d36404 1759 ABI support for this. */
1760 DECL_EXTERNAL (new_version_node->decl) = 0;
1761 DECL_ONE_ONLY (new_version_node->decl) = 0;
1762 TREE_PUBLIC (new_version_node->decl) = 0;
1763 DECL_COMDAT (new_version_node->decl) = 0;
1764 new_version_node->local.externally_visible = 0;
1765 new_version_node->local.local = 1;
1766 new_version_node->lowered = true;
1767 return new_version_node;
1768}
469679ab 1769
1770/* Produce separate function body for inline clones so the offline copy can be
1771 modified without affecting them. */
1772struct cgraph_node *
1773save_inline_function_body (struct cgraph_node *node)
1774{
1775 struct cgraph_node *first_clone;
1776
1777 gcc_assert (node == cgraph_node (node->decl));
1778
1779 cgraph_lower_function (node);
1780
1781 /* In non-unit-at-a-time we construct full fledged clone we never output to
334ec2d8 1782 assembly file. This clone is pointed out by inline_decl of original function
469679ab 1783 and inlining infrastructure knows how to deal with this. */
1784 if (!flag_unit_at_a_time)
1785 {
1786 struct cgraph_edge *e;
1787
1788 first_clone = cgraph_clone_node (node, node->count, 0, false);
1789 first_clone->needed = 0;
1790 first_clone->reachable = 1;
1791 /* Recursively clone all bodies. */
1792 for (e = first_clone->callees; e; e = e->next_callee)
1793 if (!e->inline_failed)
1794 cgraph_clone_inlined_nodes (e, true, false);
1795 }
1796 else
1797 first_clone = node->next_clone;
1798
1799 first_clone->decl = copy_node (node->decl);
1800 node->next_clone = NULL;
1801 if (!flag_unit_at_a_time)
1802 node->inline_decl = first_clone->decl;
1803 first_clone->prev_clone = NULL;
1804 cgraph_insert_node_to_hashtable (first_clone);
1805 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1806
1807 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1808 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1809
1810 DECL_EXTERNAL (first_clone->decl) = 0;
1811 DECL_ONE_ONLY (first_clone->decl) = 0;
1812 TREE_PUBLIC (first_clone->decl) = 0;
1813 DECL_COMDAT (first_clone->decl) = 0;
1814
1815 for (node = first_clone->next_clone; node; node = node->next_clone)
1816 node->decl = first_clone->decl;
1817#ifdef ENABLE_CHECKING
1818 verify_cgraph_node (first_clone);
1819#endif
1820 return first_clone;
1821}
1822