gcc/cgraphunit.c
cd6bca02 1/* Callgraph based interprocedural optimizations.
e3a37aef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well
cd6bca02 23 as a few basic interprocedural optimizers.
b0cdf642 24
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of the
33 function and it is certain that neither the body nor the declaration will change.
34
b326746d 35 (There is one exception, needed for implementing GCC extern inline
36 functions.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is
46 finalized and it will no longer change.
b0cdf642 47
48 In unit-at-a-time mode the call-graph construction and local function
49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
b0cdf642 54
55 - cgraph_optimize
56
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining, or perform similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function, and all the references
68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ front end to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones, or alternatively for marking
75 callgraph and varpool nodes referenced by the expression as needed.
76
77 ??? On the tree-ssa branch, genericizing should take place here and we
78 would avoid the need for these hooks (replacing them by a genericizing hook).
79
b0cdf642 80 We implement two compilation modes.
81
82 - unit-at-a-time: In this mode the analysis of all functions is deferred
83 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
84
85 In cgraph_finalize_compilation_unit the reachable functions are
86 analyzed. During analysis the call-graph edges from reachable
87 functions are constructed and their destinations are marked as
88 reachable. References to functions and variables are discovered too,
89 and variables found to be needed are output to the assembly file. Via
90 the mark_referenced call in assemble_variable, functions referenced by
91 static variables are noticed too.
92
ca67a72b 93 The intra-procedural information is produced and its existence is
b0cdf642 94 indicated by global_info_ready. Once this flag is set it is impossible
95 to change a function from !reachable to reachable and thus
96 assemble_variable no longer calls mark_referenced.
97
98 Finally the call-graph is topologically sorted and all reachable functions
99 that have not been completely inlined and are not external are output.
100
101 ??? It is possible that a reference to a function or variable is optimized
102 out. We cannot deal with this nicely because the topological order is not
103 suitable for it. For tree-ssa we may consider another pass doing
104 optimization and re-discovering reachable functions.
105
106 ??? Reorganize the code so variables are output last and only if they
107 really have been referenced by the produced code, so we catch more cases
108 where the reference has been optimized out.
109
110 - non-unit-at-a-time
111
112 All functions and variables are output as early as possible to conserve
113 memory consumption. This may or may not result in less memory being used,
114 but it is still needed for some legacy code that relies on a particular
115 ordering of things output from the compiler.
116
117 Varpool data structures are not used and variables are output directly.
118
119 Functions are output early using a call to
120 cgraph_assemble_pending_functions from cgraph_finalize_function. The
121 decision on whether a function is needed is made more conservatively, so
f0b5f617 122 uninlinable static functions are needed too. During the call-graph
b0cdf642 123 construction the edge destinations are not marked as reachable and we
f0b5f617 124 rely entirely on assemble_variable to mark them. */
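/* As an illustration of the interface described above, a front end driver
   would invoke these hooks roughly as sketched below.  This is an editorial
   example only, not code from GCC; `more_input' and `parse_next_function'
   stand for hypothetical front-end helpers.  */
#if 0
static void
frontend_compile_unit (void)
{
  while (more_input ())
    {
      /* Parse one function and hand its finished body to the callgraph.  */
      tree fndecl = parse_next_function ();
      cgraph_finalize_function (fndecl, /*nested=*/false);
    }

  /* The whole translation unit is parsed; analyze it and emit code.  */
  cgraph_finalize_compilation_unit ();
  cgraph_optimize ();
}
#endif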
121f3051 125
acc70efa 126
ae01b312 127#include "config.h"
128#include "system.h"
129#include "coretypes.h"
130#include "tm.h"
131#include "tree.h"
b5530559 132#include "rtl.h"
acc70efa 133#include "tree-flow.h"
ae01b312 134#include "tree-inline.h"
135#include "langhooks.h"
c6224531 136#include "pointer-set.h"
ae01b312 137#include "toplev.h"
138#include "flags.h"
139#include "ggc.h"
140#include "debug.h"
141#include "target.h"
142#include "cgraph.h"
80a85d8a 143#include "diagnostic.h"
f79b6507 144#include "timevar.h"
d7c6d889 145#include "params.h"
146#include "fibheap.h"
147#include "c-common.h"
611e5405 148#include "intl.h"
b69eb0ff 149#include "function.h"
b5d36404 150#include "ipa-prop.h"
acc70efa 151#include "tree-gimple.h"
f1e2a033 152#include "tree-pass.h"
c1dcd13c 153#include "output.h"
d7c6d889 154
a6868229 155static void cgraph_expand_all_functions (void);
d9d9733a 156static void cgraph_mark_functions_to_output (void);
157static void cgraph_expand_function (struct cgraph_node *);
f788fff2 158static void cgraph_output_pending_asms (void);
25bb88de 159
121f3051 160static FILE *cgraph_dump_file;
161
2c56f72e 162/* A vector of FUNCTION_DECLs declared as static constructors. */
163static GTY (()) VEC(tree, gc) *static_ctors;
164/* A vector of FUNCTION_DECLs declared as static destructors. */
165static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 166
167/* When the target does not have ctors and dtors, we call all constructors
310d2511 168 and destructors from a special initialization/destruction function
a861fe52 169 recognized by collect2.
170
171 When we are going to build this function, collect all constructors and
172 destructors and turn them into normal functions. */
173
174static void
175record_cdtor_fn (tree fndecl)
176{
2de29097 177 struct cgraph_node *node;
178 if (targetm.have_ctors_dtors
179 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
180 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 181 return;
182
183 if (DECL_STATIC_CONSTRUCTOR (fndecl))
184 {
2c56f72e 185 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 186 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 187 }
188 if (DECL_STATIC_DESTRUCTOR (fndecl))
189 {
2c56f72e 190 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 191 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 192 }
2de29097 193 DECL_INLINE (fndecl) = 1;
194 node = cgraph_node (fndecl);
195 node->local.disregard_inline_limits = 1;
196 cgraph_mark_reachable_node (node);
a861fe52 197}
198
2c56f72e 199/* Define global constructor/destructor functions for the CDTORS, of
200 which there are LEN. The CDTORS are sorted by initialization
201 priority. If CTOR_P is true, these are constructors; otherwise,
202 they are destructors. */
203
a861fe52 204static void
2c56f72e 205build_cdtor (bool ctor_p, tree *cdtors, size_t len)
a861fe52 206{
2c56f72e 207 size_t i;
a861fe52 208
2c56f72e 209 i = 0;
210 while (i < len)
211 {
212 tree body;
213 tree fn;
214 priority_type priority;
215
216 priority = 0;
217 body = NULL_TREE;
218 /* Find the next batch of constructors/destructors with the same
219 initialization priority. */
220 do
221 {
222 priority_type p;
223 fn = cdtors[i];
224 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
225 if (!body)
226 priority = p;
227 else if (p != priority)
228 break;
229 append_to_statement_list (build_function_call_expr (fn, 0),
230 &body);
231 ++i;
232 }
233 while (i < len);
234 gcc_assert (body != NULL_TREE);
235 /* Generate a function to call all the functions of like
236 priority. */
237 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
238 }
239}
240
241/* Comparison function for qsort. P1 and P2 are actually of type
242 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
243 used to determine the sort order. */
a861fe52 244
2c56f72e 245static int
246compare_ctor (const void *p1, const void *p2)
247{
248 tree f1;
249 tree f2;
250 int priority1;
251 int priority2;
252
253 f1 = *(const tree *)p1;
254 f2 = *(const tree *)p2;
255 priority1 = DECL_INIT_PRIORITY (f1);
256 priority2 = DECL_INIT_PRIORITY (f2);
257
258 if (priority1 < priority2)
259 return -1;
260 else if (priority1 > priority2)
261 return 1;
262 else
263 /* Ensure a stable sort. */
264 return (const tree *)p1 - (const tree *)p2;
265}
266
267/* Comparison function for qsort. P1 and P2 are actually of type
268 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
269 used to determine the sort order. */
a861fe52 270
2c56f72e 271static int
272compare_dtor (const void *p1, const void *p2)
273{
274 tree f1;
275 tree f2;
276 int priority1;
277 int priority2;
278
279 f1 = *(const tree *)p1;
280 f2 = *(const tree *)p2;
281 priority1 = DECL_FINI_PRIORITY (f1);
282 priority2 = DECL_FINI_PRIORITY (f2);
283
284 if (priority1 < priority2)
285 return -1;
286 else if (priority1 > priority2)
287 return 1;
288 else
289 /* Ensure a stable sort. */
290 return (const tree *)p1 - (const tree *)p2;
a861fe52 291}
292
293/* Generate functions to call static constructors and destructors
294 for targets that do not support .ctors/.dtors sections. These
295 functions have magic names which are detected by collect2. */
296
297static void
298cgraph_build_cdtor_fns (void)
299{
2c56f72e 300 if (!VEC_empty (tree, static_ctors))
a861fe52 301 {
2c56f72e 302 gcc_assert (!targetm.have_ctors_dtors);
303 qsort (VEC_address (tree, static_ctors),
304 VEC_length (tree, static_ctors),
305 sizeof (tree),
306 compare_ctor);
307 build_cdtor (/*ctor_p=*/true,
308 VEC_address (tree, static_ctors),
309 VEC_length (tree, static_ctors));
310 VEC_truncate (tree, static_ctors, 0);
a861fe52 311 }
2c56f72e 312
313 if (!VEC_empty (tree, static_dtors))
a861fe52 314 {
2c56f72e 315 gcc_assert (!targetm.have_ctors_dtors);
316 qsort (VEC_address (tree, static_dtors),
317 VEC_length (tree, static_dtors),
318 sizeof (tree),
319 compare_dtor);
320 build_cdtor (/*ctor_p=*/false,
321 VEC_address (tree, static_dtors),
322 VEC_length (tree, static_dtors));
323 VEC_truncate (tree, static_dtors, 0);
a861fe52 324 }
325}
326
2c0b522d 327/* Determine if function DECL is needed. That is, visible to something
328 outside this translation unit, to something magic in the system
f0b5f617 329 configury, or (if not doing unit-at-a-time) to something we haven't
2c0b522d 330 seen yet. */
331
332static bool
333decide_is_function_needed (struct cgraph_node *node, tree decl)
334{
9d95b2b0 335 tree origin;
62eec3b4 336 if (MAIN_NAME_P (DECL_NAME (decl))
337 && TREE_PUBLIC (decl))
338 {
339 node->local.externally_visible = true;
340 return true;
341 }
4ee9c684 342
3f82b628 343 /* If the user told us it is used, then it must be so. */
05806473 344 if (node->local.externally_visible)
345 return true;
346
347 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
3f82b628 348 return true;
349
350 /* ??? If the assembler name is set by hand, it is possible to assemble
351 the name later after finalizing the function and the fact is noticed
352 in assemble_name then. This is arguably a bug. */
353 if (DECL_ASSEMBLER_NAME_SET_P (decl)
354 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
355 return true;
356
55680bef 357 /* With -fkeep-inline-functions we are keeping all inline functions except
358 for extern inline ones. */
359 if (flag_keep_inline_functions
360 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 361 && !DECL_EXTERNAL (decl)
362 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 363 return true;
364
2c0b522d 365 /* If we decided it was needed before, but at the time we didn't have
366 the body of the function available, then it's still needed. We have
367 to go back and re-check its dependencies now. */
368 if (node->needed)
369 return true;
370
371 /* Externally visible functions must be output. The exception is
a0c938f0 372 COMDAT functions that must be output only when they are needed.
8baa9d15 373
374 When not optimizing, also output the static functions (see
95da6220 375 PR24561), but don't do so for always_inline functions, functions
d3d410e1 376 declared inline, and nested functions. These were optimized out
377 in the original implementation and it is unclear whether we want
554f2707 378 to change the behavior here. */
bba7ddf8 379 if (((TREE_PUBLIC (decl)
d3d410e1 380 || (!optimize && !node->local.disregard_inline_limits
381 && !DECL_DECLARED_INLINE_P (decl)
382 && !node->origin))
bba7ddf8 383 && !flag_whole_program)
62eec3b4 384 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 385 return true;
386
387 /* Constructors and destructors are reachable from the runtime by
388 some mechanism. */
389 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
390 return true;
391
2c0b522d 392 if (flag_unit_at_a_time)
393 return false;
394
395 /* If not doing unit at a time, then we'll only defer this function
396 if it's marked for inlining. Otherwise we want to emit it now. */
397
398 /* "extern inline" functions are never output locally. */
399 if (DECL_EXTERNAL (decl))
400 return false;
4ee9c684 401 /* Nested functions of an extern inline function shall not be emitted unless
402 we inlined the origin. */
9d95b2b0 403 for (origin = decl_function_context (decl); origin;
404 origin = decl_function_context (origin))
405 if (DECL_EXTERNAL (origin))
4ee9c684 406 return false;
f024691d 407 /* We want to emit COMDAT functions only when absolutely necessary. */
c08871a9 408 if (DECL_COMDAT (decl))
2c0b522d 409 return false;
410 if (!DECL_INLINE (decl)
411 || (!node->local.disregard_inline_limits
412 /* When declared inline, defer even the uninlinable functions.
e4200070 413 This allows them to be eliminated when unused. */
a0c938f0 414 && !DECL_DECLARED_INLINE_P (decl)
b30512dd 415 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
2c0b522d 416 return true;
417
418 return false;
419}
420
bdc40eb8 421/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
523c1122 422 functions into the callgraph so that they look like ordinary reachable
423 functions inserted into the callgraph already at construction time. */
424
425bool
426cgraph_process_new_functions (void)
427{
428 bool output = false;
429 tree fndecl;
430 struct cgraph_node *node;
431
432 /* Note that this queue may grow as it's being processed, as the new
433 functions may generate new ones. */
434 while (cgraph_new_nodes)
435 {
436 node = cgraph_new_nodes;
437 fndecl = node->decl;
438 cgraph_new_nodes = cgraph_new_nodes->next_needed;
439 switch (cgraph_state)
440 {
441 case CGRAPH_STATE_CONSTRUCTION:
442 /* At construction time we just need to finalize function and move
443 it into reachable functions list. */
444
445 node->next_needed = NULL;
446 cgraph_finalize_function (fndecl, false);
447 cgraph_mark_reachable_node (node);
448 output = true;
449 break;
450
451 case CGRAPH_STATE_IPA:
f517b36e 452 case CGRAPH_STATE_IPA_SSA:
523c1122 453 /* When IPA optimization has already started, do all essential
454 transformations that have already been performed on the whole
455 cgraph but not on this function. */
456
457 tree_register_cfg_hooks ();
458 if (!node->analyzed)
459 cgraph_analyze_function (node);
460 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
461 current_function_decl = fndecl;
9c1bff7a 462 compute_inline_parameters (node);
f517b36e 463 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
464 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
465 /* When not optimizing, be sure we run early local passes anyway
466 to expand OMP. */
467 || !optimize)
20099e35 468 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 469 free_dominance_info (CDI_POST_DOMINATORS);
470 free_dominance_info (CDI_DOMINATORS);
471 pop_cfun ();
472 current_function_decl = NULL;
473 break;
474
475 case CGRAPH_STATE_EXPANSION:
476 /* Functions created during expansion shall be compiled
477 directly. */
478 node->output = 0;
479 cgraph_expand_function (node);
480 break;
481
482 default:
483 gcc_unreachable ();
484 break;
485 }
486 }
487 return output;
488}
489
c08871a9 490/* When not doing unit-at-a-time, output all functions enqueued.
491 Return true when such functions were found. */
050e11c9 492
b326746d 493static bool
c08871a9 494cgraph_assemble_pending_functions (void)
495{
496 bool output = false;
497
27c9bdfe 498 if (flag_unit_at_a_time || errorcount || sorrycount)
c08871a9 499 return false;
500
f788fff2 501 cgraph_output_pending_asms ();
502
c08871a9 503 while (cgraph_nodes_queue)
504 {
505 struct cgraph_node *n = cgraph_nodes_queue;
506
507 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 508 n->next_needed = NULL;
1d09f0e6 509 if (!n->global.inlined_to
510 && !n->alias
511 && !DECL_EXTERNAL (n->decl))
050e11c9 512 {
513 cgraph_expand_function (n);
514 output = true;
515 }
523c1122 516 output |= cgraph_process_new_functions ();
773c5ba7 517 }
518
c08871a9 519 return output;
520}
773c5ba7 521
522
9b8fb23a 523/* As a GCC extension we allow redefinition of a function. The
524 semantics when the two bodies differ are not well defined.
525 We replace the old body with the new body, so in unit-at-a-time mode
526 we always use the new body, while in normal mode we may end up with the
527 old body inlined into some functions and the new body expanded and
528 inlined in others.
529
530 ??? It may make more sense to use one body for inlining and the other
531 body for expanding the function, but this is difficult to do. */
532
533static void
534cgraph_reset_node (struct cgraph_node *node)
535{
536 /* If node->output is set, then this is a unit-at-a-time compilation
537 and we have already begun whole-unit analysis. This is *not*
538 testing for whether we've already emitted the function. That
a0c938f0 539 case can be sort-of legitimately seen with real function
9b8fb23a 540 redefinition errors. I would argue that the front end should
541 never present us with such a case, but don't enforce that for now. */
542 gcc_assert (!node->output);
543
544 /* Reset our data structures so we can analyze the function again. */
545 memset (&node->local, 0, sizeof (node->local));
546 memset (&node->global, 0, sizeof (node->global));
547 memset (&node->rtl, 0, sizeof (node->rtl));
548 node->analyzed = false;
549 node->local.redefined_extern_inline = true;
550 node->local.finalized = false;
551
552 if (!flag_unit_at_a_time)
553 {
f4ec5ce1 554 struct cgraph_node *n, *next;
9b8fb23a 555
f4ec5ce1 556 for (n = cgraph_nodes; n; n = next)
557 {
558 next = n->next;
559 if (n->global.inlined_to == node)
560 cgraph_remove_node (n);
561 }
9b8fb23a 562 }
563
564 cgraph_node_remove_callees (node);
565
566 /* We may need to re-queue the node for assembling in case
46beef9a 567 we already processed it and ignored it as not needed, or got
568 a re-declaration in IMA mode. */
569 if (node->reachable)
9b8fb23a 570 {
571 struct cgraph_node *n;
572
573 for (n = cgraph_nodes_queue; n; n = n->next_needed)
574 if (n == node)
575 break;
576 if (!n)
577 node->reachable = 0;
578 }
579}
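/* Editorial illustration (not part of GCC) of the redefinition extension
   described in the comment above cgraph_reset_node, assuming gnu89
   "extern inline" semantics: the second body replaces the first, and the
   node for `twice' is reset and re-analyzed.  */
#if 0
extern inline int twice (int x) { return x + x; }   /* first body */
int twice (int x) { return 2 * x; }                 /* replacement body */
#endif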
c08871a9 580
1e8e9920 581static void
582cgraph_lower_function (struct cgraph_node *node)
583{
584 if (node->lowered)
585 return;
586 tree_lowering_passes (node->decl);
587 node->lowered = true;
588}
589
28df663b 590/* DECL has been parsed. Take it, queue it, compile it at the whim of the
591 logic in effect. If NESTED is true, then our caller cannot stand to have
592 the garbage collector run at the moment. We would need to either create
593 a new GC context, or just not compile right now. */
ae01b312 594
595void
28df663b 596cgraph_finalize_function (tree decl, bool nested)
ae01b312 597{
598 struct cgraph_node *node = cgraph_node (decl);
599
c08871a9 600 if (node->local.finalized)
9b8fb23a 601 cgraph_reset_node (node);
28df663b 602
167b550b 603 node->pid = cgraph_max_pid ++;
c08871a9 604 notice_global_symbol (decl);
79bb87b4 605 node->local.finalized = true;
e27482aa 606 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
a861fe52 607 record_cdtor_fn (node->decl);
9d95b2b0 608 if (node->nested)
609 lower_nested_functions (decl);
610 gcc_assert (!node->nested);
ae01b312 611
2c0b522d 612 /* If not unit at a time, then we need to create the call graph
613 now, so that called functions can be queued and emitted now. */
2ff66ee0 614 if (!flag_unit_at_a_time)
09a2e412 615 cgraph_analyze_function (node);
2ff66ee0 616
2c0b522d 617 if (decide_is_function_needed (node, decl))
618 cgraph_mark_needed_node (node);
619
ecda6e51 620 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 621 level unit, we need to be conservative about possible entry points
622 there. */
62eec3b4 623 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 624 cgraph_mark_reachable_node (node);
625
28df663b 626 /* If not unit at a time, go ahead and emit everything we've found
627 to be reachable at this time. */
628 if (!nested)
dc721f36 629 {
630 if (!cgraph_assemble_pending_functions ())
631 ggc_collect ();
632 }
3d7bfc56 633
2c0b522d 634 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 635 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 636 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 637
b69eb0ff 638 /* Possibly warn about unused parameters. */
639 if (warn_unused_parameter)
640 do_warn_unused_parameter (decl);
ae01b312 641}
642
0da03d11 643/* The C99 extern inline keyword allows changing a declaration after the function
644 has been finalized. We then need to re-decide whether we want to mark the
645 function as needed. */
646
647void
648cgraph_mark_if_needed (tree decl)
649{
650 struct cgraph_node *node = cgraph_node (decl);
651 if (node->local.finalized && decide_is_function_needed (node, decl))
652 cgraph_mark_needed_node (node);
653}
654
b0cdf642 655/* Verify the consistency of the given cgraph node. */
656void
657verify_cgraph_node (struct cgraph_node *node)
658{
659 struct cgraph_edge *e;
660 struct cgraph_node *main_clone;
e27482aa 661 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
117ef3d7 662 struct function *saved_cfun = cfun;
e27482aa 663 basic_block this_block;
664 block_stmt_iterator bsi;
9bfec7c2 665 bool error_found = false;
b0cdf642 666
bd09cd3e 667 if (errorcount || sorrycount)
668 return;
669
b0cdf642 670 timevar_push (TV_CGRAPH_VERIFY);
117ef3d7 671 /* debug_generic_stmt needs correct cfun */
672 set_cfun (this_cfun);
b0cdf642 673 for (e = node->callees; e; e = e->next_callee)
674 if (e->aux)
675 {
0a81f5a0 676 error ("aux field set for edge %s->%s",
b0cdf642 677 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
678 error_found = true;
679 }
a2cb9b3b 680 if (node->count < 0)
681 {
682 error ("Execution count is negative");
683 error_found = true;
684 }
b0cdf642 685 for (e = node->callers; e; e = e->next_caller)
686 {
a2cb9b3b 687 if (e->count < 0)
688 {
689 error ("caller edge count is negative");
690 error_found = true;
691 }
4ae20857 692 if (e->frequency < 0)
693 {
694 error ("caller edge frequency is negative");
695 error_found = true;
696 }
697 if (e->frequency > CGRAPH_FREQ_MAX)
698 {
699 error ("caller edge frequency is too large");
700 error_found = true;
701 }
b0cdf642 702 if (!e->inline_failed)
703 {
704 if (node->global.inlined_to
705 != (e->caller->global.inlined_to
706 ? e->caller->global.inlined_to : e->caller))
707 {
0a81f5a0 708 error ("inlined_to pointer is wrong");
b0cdf642 709 error_found = true;
710 }
711 if (node->callers->next_caller)
712 {
0a81f5a0 713 error ("multiple inline callers");
b0cdf642 714 error_found = true;
715 }
716 }
717 else
718 if (node->global.inlined_to)
719 {
0a81f5a0 720 error ("inlined_to pointer set for noninline callers");
b0cdf642 721 error_found = true;
722 }
723 }
724 if (!node->callers && node->global.inlined_to)
725 {
5cd75817 726 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 727 error_found = true;
728 }
729 if (node->global.inlined_to == node)
730 {
0a81f5a0 731 error ("inlined_to pointer refers to itself");
b0cdf642 732 error_found = true;
733 }
734
735 for (main_clone = cgraph_node (node->decl); main_clone;
736 main_clone = main_clone->next_clone)
737 if (main_clone == node)
738 break;
0f6439b9 739 if (!cgraph_node (node->decl))
b0cdf642 740 {
0f6439b9 741 error ("node not found in cgraph_hash");
b0cdf642 742 error_found = true;
743 }
a0c938f0 744
b0cdf642 745 if (node->analyzed
746 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
747 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
748 {
e27482aa 749 if (this_cfun->cfg)
750 {
751 /* The nodes we're interested in are never shared, so walk
752 the tree ignoring duplicates. */
e7c352d1 753 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 754 /* Reach the trees by walking over the CFG, and note the
755 enclosing basic-blocks in the call edges. */
756 FOR_EACH_BB_FN (this_block, this_cfun)
757 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
9bfec7c2 758 {
759 tree stmt = bsi_stmt (bsi);
760 tree call = get_call_expr_in (stmt);
761 tree decl;
762 if (call && (decl = get_callee_fndecl (call)))
763 {
764 struct cgraph_edge *e = cgraph_edge (node, stmt);
765 if (e)
766 {
767 if (e->aux)
768 {
0a81f5a0 769 error ("shared call_stmt:");
9bfec7c2 770 debug_generic_stmt (stmt);
771 error_found = true;
772 }
469679ab 773 if (e->callee->decl != cgraph_node (decl)->decl
774 && e->inline_failed)
9bfec7c2 775 {
0a81f5a0 776 error ("edge points to wrong declaration:");
9bfec7c2 777 debug_tree (e->callee->decl);
778 fprintf (stderr," Instead of:");
779 debug_tree (decl);
780 }
781 e->aux = (void *)1;
782 }
783 else
784 {
0a81f5a0 785 error ("missing callgraph edge for call stmt:");
9bfec7c2 786 debug_generic_stmt (stmt);
787 error_found = true;
788 }
789 }
790 }
e27482aa 791 pointer_set_destroy (visited_nodes);
e27482aa 792 }
793 else
794 /* No CFG available?! */
795 gcc_unreachable ();
796
b0cdf642 797 for (e = node->callees; e; e = e->next_callee)
798 {
799 if (!e->aux)
800 {
0a81f5a0 801 error ("edge %s->%s has no corresponding call_stmt",
b0cdf642 802 cgraph_node_name (e->caller),
803 cgraph_node_name (e->callee));
9bfec7c2 804 debug_generic_stmt (e->call_stmt);
b0cdf642 805 error_found = true;
806 }
807 e->aux = 0;
808 }
809 }
810 if (error_found)
811 {
812 dump_cgraph_node (stderr, node);
0a81f5a0 813 internal_error ("verify_cgraph_node failed");
b0cdf642 814 }
117ef3d7 815 set_cfun (saved_cfun);
b0cdf642 816 timevar_pop (TV_CGRAPH_VERIFY);
817}
818
819/* Verify whole cgraph structure. */
820void
821verify_cgraph (void)
822{
823 struct cgraph_node *node;
824
8ec2a798 825 if (sorrycount || errorcount)
826 return;
827
b0cdf642 828 for (node = cgraph_nodes; node; node = node->next)
829 verify_cgraph_node (node);
830}
831
56af936e 832/* Output all asm statements we have stored up to be output. */
833
834static void
835cgraph_output_pending_asms (void)
836{
837 struct cgraph_asm_node *can;
838
839 if (errorcount || sorrycount)
840 return;
841
842 for (can = cgraph_asm_nodes; can; can = can->next)
843 assemble_asm (can->asm_str);
844 cgraph_asm_nodes = NULL;
845}
846
0785e435 847/* Analyze the function scheduled to be output. */
1e8e9920 848void
0785e435 849cgraph_analyze_function (struct cgraph_node *node)
850{
851 tree decl = node->decl;
852
ec1e35b2 853 current_function_decl = decl;
e27482aa 854 push_cfun (DECL_STRUCT_FUNCTION (decl));
855 cgraph_lower_function (node);
6e8d6e86 856 node->analyzed = true;
0785e435 857
27c9bdfe 858 if (!flag_unit_at_a_time && !sorrycount && !errorcount)
f517b36e 859 {
860 bitmap_obstack_initialize (NULL);
861 tree_register_cfg_hooks ();
20099e35 862 execute_pass_list (pass_early_local_passes.pass.sub);
f517b36e 863 free_dominance_info (CDI_POST_DOMINATORS);
864 free_dominance_info (CDI_DOMINATORS);
865 bitmap_obstack_release (NULL);
866 }
0785e435 867
e27482aa 868 pop_cfun ();
c08871a9 869 current_function_decl = NULL;
0785e435 870}
871
05806473 872/* Look for externally_visible and used attributes and mark cgraph nodes
873 accordingly.
874
875 We cannot mark the nodes at the point the attributes are processed (in
876 handle_*_attribute) because the copy of the declarations available at that
877 point may not be canonical. For example, in:
878
879 void f();
880 void f() __attribute__((used));
881
882 the declaration we see in handle_used_attribute will be the second
883 declaration -- but the front end will subsequently merge that declaration
884 with the original declaration and discard the second declaration.
885
886 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
887
888 void f() {}
889 void f() __attribute__((externally_visible));
890
891 is valid.
892
893 So, we walk the nodes at the end of the translation unit, applying the
894 attributes at that point. */
895
896static void
897process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 898 struct varpool_node *first_var)
05806473 899{
900 struct cgraph_node *node;
1d416bd7 901 struct varpool_node *vnode;
05806473 902
903 for (node = cgraph_nodes; node != first; node = node->next)
904 {
905 tree decl = node->decl;
906 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
907 {
908 mark_decl_referenced (decl);
909 if (node->local.finalized)
910 cgraph_mark_needed_node (node);
911 }
912 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
913 {
ba12ea31 914 if (! TREE_PUBLIC (node->decl))
915 warning (OPT_Wattributes,
916 "%J%<externally_visible%> attribute has effect only on public objects",
917 node->decl);
918 else
919 {
920 if (node->local.finalized)
921 cgraph_mark_needed_node (node);
922 node->local.externally_visible = true;
923 }
05806473 924 }
925 }
1d416bd7 926 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 927 {
928 tree decl = vnode->decl;
929 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
930 {
931 mark_decl_referenced (decl);
932 if (vnode->finalized)
1d416bd7 933 varpool_mark_needed_node (vnode);
05806473 934 }
935 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
936 {
ba12ea31 937 if (! TREE_PUBLIC (vnode->decl))
938 warning (OPT_Wattributes,
939 "%J%<externally_visible%> attribute has effect only on public objects",
940 vnode->decl);
941 else
942 {
943 if (vnode->finalized)
1d416bd7 944 varpool_mark_needed_node (vnode);
ba12ea31 945 vnode->externally_visible = true;
946 }
05806473 947 }
948 }
949}
950
aeeb194b 951/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
952 each reachable function) and build the cgraph.
953 The function can be called multiple times after inserting new nodes
0d424440 954 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 955
aeeb194b 956static void
957cgraph_analyze_functions (void)
ae01b312 958{
c1dcd13c 959 /* Keep track of already processed nodes when called multiple times for
06b27565 960 intermodule optimization. */
c1dcd13c 961 static struct cgraph_node *first_analyzed;
c17d0de1 962 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 963 static struct varpool_node *first_analyzed_var;
aeeb194b 964 struct cgraph_node *node, *next;
ae01b312 965
c17d0de1 966 process_function_and_variable_attributes (first_processed,
967 first_analyzed_var);
968 first_processed = cgraph_nodes;
1d416bd7 969 first_analyzed_var = varpool_nodes;
970 varpool_analyze_pending_decls ();
f79b6507 971 if (cgraph_dump_file)
ae01b312 972 {
e4200070 973 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 974 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
3d7bfc56 975 if (node->needed && DECL_SAVED_TREE (node->decl))
f79b6507 976 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
977 fprintf (cgraph_dump_file, "\n");
ae01b312 978 }
aeeb194b 979 cgraph_process_new_functions ();
ae01b312 980
e6d2b2d8 981 /* Propagate the reachability flag and lower the representation of all reachable
982 functions. In the future, lowering will introduce new functions and
983 new entry points on the way (by template instantiation and virtual
984 method table generation for instance). */
3d7bfc56 985 while (cgraph_nodes_queue)
ae01b312 986 {
0785e435 987 struct cgraph_edge *edge;
3d7bfc56 988 tree decl = cgraph_nodes_queue->decl;
989
990 node = cgraph_nodes_queue;
d87976fb 991 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 992 node->next_needed = NULL;
ae01b312 993
638531ad 994 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 995 weak alias attribute to kill its body. See
638531ad 996 gcc.c-torture/compile/20011119-1.c */
997 if (!DECL_SAVED_TREE (decl))
9b8fb23a 998 {
999 cgraph_reset_node (node);
1000 continue;
1001 }
638531ad 1002
cc636d56 1003 gcc_assert (!node->analyzed && node->reachable);
1004 gcc_assert (DECL_SAVED_TREE (decl));
ae01b312 1005
0785e435 1006 cgraph_analyze_function (node);
2c0b522d 1007
ae01b312 1008 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1009 if (!edge->callee->reachable)
2c0b522d 1010 cgraph_mark_reachable_node (edge->callee);
1011
c17d0de1 1012 /* We finalize local static variables during constructing callgraph
1013 edges. Process their attributes too. */
1014 process_function_and_variable_attributes (first_processed,
1015 first_analyzed_var);
1016 first_processed = cgraph_nodes;
1d416bd7 1017 first_analyzed_var = varpool_nodes;
1018 varpool_analyze_pending_decls ();
aeeb194b 1019 cgraph_process_new_functions ();
ae01b312 1020 }
2c0b522d 1021
aa5e06c7 1022 /* Collect entry points to the unit. */
f79b6507 1023 if (cgraph_dump_file)
3d7bfc56 1024 {
e4200070 1025 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1026 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
3d7bfc56 1027 if (node->needed && DECL_SAVED_TREE (node->decl))
f79b6507 1028 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1029 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1030 dump_cgraph (cgraph_dump_file);
3d7bfc56 1031 }
e6d2b2d8 1032
f79b6507 1033 if (cgraph_dump_file)
1034 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1035
f4ec5ce1 1036 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1037 {
1038 tree decl = node->decl;
f4ec5ce1 1039 next = node->next;
ae01b312 1040
9b8fb23a 1041 if (node->local.finalized && !DECL_SAVED_TREE (decl))
a0c938f0 1042 cgraph_reset_node (node);
9b8fb23a 1043
ae01b312 1044 if (!node->reachable && DECL_SAVED_TREE (decl))
1045 {
f79b6507 1046 if (cgraph_dump_file)
1047 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1048 cgraph_remove_node (node);
9b8fb23a 1049 continue;
ae01b312 1050 }
bc5cab3b 1051 else
1052 node->next_needed = NULL;
9b8fb23a 1053 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1054 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1055 }
f79b6507 1056 if (cgraph_dump_file)
e4200070 1057 {
1058 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1059 dump_cgraph (cgraph_dump_file);
1060 }
c1dcd13c 1061 first_analyzed = cgraph_nodes;
ae01b312 1062 ggc_collect ();
aeeb194b 1063}
1064
1065/* Analyze the whole compilation unit once it is parsed completely. */
1066
1067void
1068cgraph_finalize_compilation_unit (void)
1069{
1070 if (errorcount || sorrycount)
1071 return;
1072
1073 finish_aliases_1 ();
1074
1075 if (!flag_unit_at_a_time)
1076 {
1077 cgraph_output_pending_asms ();
1078 cgraph_assemble_pending_functions ();
1079 varpool_output_debug_info ();
1080 return;
1081 }
1082
1083 if (!quiet_flag)
1084 {
1085 fprintf (stderr, "\nAnalyzing compilation unit\n");
1086 fflush (stderr);
1087 }
1088
1089 timevar_push (TV_CGRAPH);
1090 cgraph_analyze_functions ();
f79b6507 1091 timevar_pop (TV_CGRAPH);
ae01b312 1092}
ae01b312 1093/* Figure out what functions we want to assemble. */
1094
1095static void
d9d9733a 1096cgraph_mark_functions_to_output (void)
ae01b312 1097{
1098 struct cgraph_node *node;
1099
ae01b312 1100 for (node = cgraph_nodes; node; node = node->next)
1101 {
1102 tree decl = node->decl;
d7c6d889 1103 struct cgraph_edge *e;
a0c938f0 1104
cc636d56 1105 gcc_assert (!node->output);
d7c6d889 1106
1107 for (e = node->callers; e; e = e->next_caller)
611e5405 1108 if (e->inline_failed)
d7c6d889 1109 break;
ae01b312 1110
e6d2b2d8 1111 /* We need to output all local functions that are used and not
1112 always inlined, as well as those that are reachable from
1113 outside the current compilation unit. */
ae01b312 1114 if (DECL_SAVED_TREE (decl)
b0cdf642 1115 && !node->global.inlined_to
ae01b312 1116 && (node->needed
d7c6d889 1117 || (e && node->reachable))
4ee9c684 1118 && !TREE_ASM_WRITTEN (decl)
ae01b312 1119 && !DECL_EXTERNAL (decl))
1120 node->output = 1;
cc636d56 1121 else
9cee7c3f 1122 {
1123 /* We should've reclaimed all functions that are not needed. */
1124#ifdef ENABLE_CHECKING
1125 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1126 && !DECL_EXTERNAL (decl))
1127 {
1128 dump_cgraph_node (stderr, node);
1129 internal_error ("failed to reclaim unneeded function");
1130 }
1131#endif
1132 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1133 || DECL_EXTERNAL (decl));
1134
1135 }
a0c938f0 1136
961e3b13 1137 }
1138}
1139
ae01b312 1140/* Expand function specified by NODE. */
e6d2b2d8 1141
ae01b312 1142static void
d9d9733a 1143cgraph_expand_function (struct cgraph_node *node)
ae01b312 1144{
1145 tree decl = node->decl;
1146
b0cdf642 1147 /* We ought to not compile any inline clones. */
cc636d56 1148 gcc_assert (!node->global.inlined_to);
b0cdf642 1149
28df663b 1150 if (flag_unit_at_a_time)
1151 announce_function (decl);
961e3b13 1152
e7c352d1 1153 gcc_assert (node->lowered);
f8deefc1 1154
794da2bb 1155 /* Generate RTL for the body of DECL. */
84e10000 1156 if (lang_hooks.callgraph.emit_associated_thunks)
1157 lang_hooks.callgraph.emit_associated_thunks (decl);
1158 tree_rest_of_compilation (decl);
961e3b13 1159
4ee9c684 1160 /* Make sure that BE didn't give up on compiling. */
1161 /* ??? Can happen with nested function of extern inline. */
c04e3894 1162 gcc_assert (TREE_ASM_WRITTEN (decl));
b0cdf642 1163
ae01b312 1164 current_function_decl = NULL;
c04e3894 1165 if (!cgraph_preserve_function_body_p (decl))
4ee9c684 1166 {
b62f482d 1167 cgraph_release_function_body (node);
7edd21a5 1168 /* Eliminate all call edges. This is important so the call_expr no longer
8ec2a798 1169 points to the dead function body. */
bb4c7a44 1170 cgraph_node_remove_callees (node);
4ee9c684 1171 }
e1be32b8 1172
1173 cgraph_function_flags_ready = true;
ae01b312 1174}
1175
b0cdf642 1176/* Return true when CALLEE_DECL should be inlined into CALLER_DECL. */
d7c6d889 1177
1178bool
b0cdf642 1179cgraph_inline_p (struct cgraph_edge *e, const char **reason)
d7c6d889 1180{
b0cdf642 1181 *reason = e->inline_failed;
1182 return !e->inline_failed;
d7c6d889 1183}
b0cdf642 1184
acc70efa 1185
acc70efa 1186
d9d9733a 1187/* Expand all functions that must be output.
1188
d7c6d889 1189 Attempt to topologically sort the nodes so a function is output when
1190 all called functions are already assembled, allowing data to be
91c82c20 1191 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1192 between a function and its callees (later we may choose to use a more
d7c6d889 1193 sophisticated algorithm for function reordering; we will likely want
1194 to use subsections to make the output functions appear in top-down
1195 order). */
1196
1197static void
a6868229 1198cgraph_expand_all_functions (void)
d7c6d889 1199{
1200 struct cgraph_node *node;
4c36ffe6 1201 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1202 int order_pos, new_order_pos = 0;
d7c6d889 1203 int i;
1204
d7c6d889 1205 order_pos = cgraph_postorder (order);
cc636d56 1206 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1207
7bd28bba 1208 /* The garbage collector may remove inline clones that we eliminate during
b0cdf642 1209 optimization, so we must be sure not to reference them. */
1210 for (i = 0; i < order_pos; i++)
1211 if (order[i]->output)
1212 order[new_order_pos++] = order[i];
1213
1214 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1215 {
1216 node = order[i];
1217 if (node->output)
1218 {
cc636d56 1219 gcc_assert (node->reachable);
d7c6d889 1220 node->output = 0;
1221 cgraph_expand_function (node);
1222 }
1223 }
523c1122 1224 cgraph_process_new_functions ();
773c5ba7 1225
d7c6d889 1226 free (order);
773c5ba7 1227
d7c6d889 1228}
1229
56af936e 1230/* This is used to sort the node types by the cgraph order number. */
1231
1232struct cgraph_order_sort
1233{
1234 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1235 union
1236 {
1237 struct cgraph_node *f;
1d416bd7 1238 struct varpool_node *v;
56af936e 1239 struct cgraph_asm_node *a;
1240 } u;
1241};
1242
1243/* Output all functions, variables, and asm statements in the order
1244 according to their order fields, which is the order in which they
1245 appeared in the file. This implements -fno-toplevel-reorder. In
1246 this mode we may output functions and variables which don't really
1247 need to be output. */
1248
1249static void
1250cgraph_output_in_order (void)
1251{
1252 int max;
1253 size_t size;
1254 struct cgraph_order_sort *nodes;
1255 int i;
1256 struct cgraph_node *pf;
1d416bd7 1257 struct varpool_node *pv;
56af936e 1258 struct cgraph_asm_node *pa;
1259
1260 max = cgraph_order;
1261 size = max * sizeof (struct cgraph_order_sort);
1262 nodes = (struct cgraph_order_sort *) alloca (size);
1263 memset (nodes, 0, size);
1264
1d416bd7 1265 varpool_analyze_pending_decls ();
56af936e 1266
1267 for (pf = cgraph_nodes; pf; pf = pf->next)
1268 {
1269 if (pf->output)
1270 {
1271 i = pf->order;
1272 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1273 nodes[i].kind = ORDER_FUNCTION;
1274 nodes[i].u.f = pf;
1275 }
1276 }
1277
1d416bd7 1278 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1279 {
1280 i = pv->order;
1281 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1282 nodes[i].kind = ORDER_VAR;
1283 nodes[i].u.v = pv;
1284 }
1285
1286 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1287 {
1288 i = pa->order;
1289 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1290 nodes[i].kind = ORDER_ASM;
1291 nodes[i].u.a = pa;
1292 }
56af936e 1293
304e5318 1294 /* In toplevel reorder mode we output all statics; mark them as needed. */
1295 for (i = 0; i < max; ++i)
1296 {
1297 if (nodes[i].kind == ORDER_VAR)
1298 {
1299 varpool_mark_needed_node (nodes[i].u.v);
1300 }
1301 }
1302 varpool_empty_needed_queue ();
1303
56af936e 1304 for (i = 0; i < max; ++i)
1305 {
1306 switch (nodes[i].kind)
1307 {
1308 case ORDER_FUNCTION:
1309 nodes[i].u.f->output = 0;
1310 cgraph_expand_function (nodes[i].u.f);
1311 break;
1312
1313 case ORDER_VAR:
1d416bd7 1314 varpool_assemble_decl (nodes[i].u.v);
56af936e 1315 break;
1316
1317 case ORDER_ASM:
1318 assemble_asm (nodes[i].u.a->asm_str);
1319 break;
1320
1321 case ORDER_UNDEFINED:
1322 break;
1323
1324 default:
1325 gcc_unreachable ();
1326 }
1327 }
4b4ea2db 1328
1329 cgraph_asm_nodes = NULL;
56af936e 1330}
1331
b0cdf642 1332/* Return true when function body of DECL still needs to be kept around
1333 for later re-use. */
1334bool
1335cgraph_preserve_function_body_p (tree decl)
1336{
1337 struct cgraph_node *node;
b0cdf642 1338 if (!cgraph_global_info_ready)
46b2c17d 1339 return (flag_really_no_inline
ebb7d626 1340 ? DECL_DISREGARD_INLINE_LIMITS (decl)
46b2c17d 1341 : DECL_INLINE (decl));
b0cdf642 1342 /* Look if there is any clone around. */
1343 for (node = cgraph_node (decl); node; node = node->next_clone)
1344 if (node->global.inlined_to)
1345 return true;
1346 return false;
1347}
1348
77fce4cd 1349static void
1350ipa_passes (void)
1351{
87d4aa85 1352 set_cfun (NULL);
4b14adf9 1353 current_function_decl = NULL;
77fce4cd 1354 tree_register_cfg_hooks ();
1355 bitmap_obstack_initialize (NULL);
1356 execute_ipa_pass_list (all_ipa_passes);
1357 bitmap_obstack_release (NULL);
1358}
1359
ae01b312 1360/* Perform simple optimizations based on callgraph. */
1361
1362void
d9d9733a 1363cgraph_optimize (void)
ae01b312 1364{
cb2b5570 1365 if (errorcount || sorrycount)
1366 return;
1367
b0cdf642 1368#ifdef ENABLE_CHECKING
1369 verify_cgraph ();
1370#endif
a861fe52 1371
1372 /* Call functions declared with the "constructor" or "destructor"
1373 attribute. */
1374 cgraph_build_cdtor_fns ();
2ff66ee0 1375 if (!flag_unit_at_a_time)
c1dcd13c 1376 {
523c1122 1377 cgraph_assemble_pending_functions ();
1378 cgraph_process_new_functions ();
1379 cgraph_state = CGRAPH_STATE_FINISHED;
56af936e 1380 cgraph_output_pending_asms ();
1d416bd7 1381 varpool_assemble_pending_decls ();
1382 varpool_output_debug_info ();
c1dcd13c 1383 return;
1384 }
e9f08e82 1385
c1dcd13c 1386 /* Frontend may output common variables after the unit has been finalized.
1387 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 1388 varpool_analyze_pending_decls ();
aeeb194b 1389 cgraph_analyze_functions ();
e9f08e82 1390
f79b6507 1391 timevar_push (TV_CGRAPHOPT);
51949610 1392 if (pre_ipa_mem_report)
1393 {
1394 fprintf (stderr, "Memory consumption before IPA\n");
1395 dump_memory_report (false);
1396 }
d7c6d889 1397 if (!quiet_flag)
cd6bca02 1398 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1399 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1400
be4d0974 1401 /* Don't run the IPA passes if there were any error or sorry messages. */
1402 if (errorcount == 0 && sorrycount == 0)
1403 ipa_passes ();
1404
e1be32b8 1405 /* This pass removes the bodies of extern inline functions we never inlined.
1406 Do this later so other IPA passes see what is really going on. */
1407 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1408 cgraph_global_info_ready = true;
f79b6507 1409 if (cgraph_dump_file)
1410 {
e4200070 1411 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1412 dump_cgraph (cgraph_dump_file);
c1dcd13c 1413 dump_varpool (cgraph_dump_file);
f79b6507 1414 }
51949610 1415 if (post_ipa_mem_report)
1416 {
defa2fa6 1417 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1418 dump_memory_report (false);
1419 }
f79b6507 1420 timevar_pop (TV_CGRAPHOPT);
ae01b312 1421
d7c6d889 1422 /* Output everything. */
e4200070 1423 if (!quiet_flag)
1424 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1425#ifdef ENABLE_CHECKING
1426 verify_cgraph ();
1427#endif
56af936e 1428
acc70efa 1429 cgraph_mark_functions_to_output ();
c1dcd13c 1430
523c1122 1431 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1432 if (!flag_toplevel_reorder)
1433 cgraph_output_in_order ();
1434 else
1435 {
1436 cgraph_output_pending_asms ();
1437
1438 cgraph_expand_all_functions ();
1d416bd7 1439 varpool_remove_unreferenced_decls ();
56af936e 1440
1d416bd7 1441 varpool_assemble_pending_decls ();
56af936e 1442 }
070eb4db 1443 varpool_output_debug_info ();
523c1122 1444 cgraph_process_new_functions ();
1445 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1446
f79b6507 1447 if (cgraph_dump_file)
1448 {
e4200070 1449 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1450 dump_cgraph (cgraph_dump_file);
1451 }
b0cdf642 1452#ifdef ENABLE_CHECKING
1453 verify_cgraph ();
4ee9c684 1454 /* Double check that all inline clones are gone and that all
1455 function bodies have been released from memory. */
1456 if (flag_unit_at_a_time
4ee9c684 1457 && !(sorrycount || errorcount))
1458 {
1459 struct cgraph_node *node;
1460 bool error_found = false;
1461
1462 for (node = cgraph_nodes; node; node = node->next)
1463 if (node->analyzed
1464 && (node->global.inlined_to
a0c938f0 1465 || DECL_SAVED_TREE (node->decl)))
4ee9c684 1466 {
1467 error_found = true;
1468 dump_cgraph_node (stderr, node);
a0c938f0 1469 }
4ee9c684 1470 if (error_found)
c04e3894 1471 internal_error ("nodes with unreleased memory found");
4ee9c684 1472 }
b0cdf642 1473#endif
ae01b312 1474}
2c56f72e 1475/* Generate and emit a static constructor or destructor. WHICH must
1476 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1477 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
f0b5f617 1478 initialization priority for this constructor or destructor. */
b5530559 1479
1480void
c5344b58 1481cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 1482{
1483 static int counter = 0;
1484 char which_buf[16];
540edea7 1485 tree decl, name, resdecl;
b5530559 1486
2c56f72e 1487 /* The priority is encoded in the constructor or destructor name.
1488 collect2 will sort the names and arrange that they are called at
1489 program startup. */
1490 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
db85cc4f 1491 name = get_file_function_name (which_buf);
b5530559 1492
1493 decl = build_decl (FUNCTION_DECL, name,
1494 build_function_type (void_type_node, void_list_node));
1495 current_function_decl = decl;
1496
540edea7 1497 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1498 DECL_ARTIFICIAL (resdecl) = 1;
540edea7 1499 DECL_RESULT (decl) = resdecl;
1500
80f2ef47 1501 allocate_struct_function (decl, false);
b5530559 1502
1503 TREE_STATIC (decl) = 1;
1504 TREE_USED (decl) = 1;
1505 DECL_ARTIFICIAL (decl) = 1;
b5530559 1506 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1507 DECL_SAVED_TREE (decl) = body;
1508 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1509 DECL_UNINLINABLE (decl) = 1;
1510
1511 DECL_INITIAL (decl) = make_node (BLOCK);
1512 TREE_USED (DECL_INITIAL (decl)) = 1;
1513
1514 DECL_SOURCE_LOCATION (decl) = input_location;
1515 cfun->function_end_locus = input_location;
1516
cc636d56 1517 switch (which)
1518 {
1519 case 'I':
1520 DECL_STATIC_CONSTRUCTOR (decl) = 1;
64c2e9b0 1521 decl_init_priority_insert (decl, priority);
cc636d56 1522 break;
1523 case 'D':
1524 DECL_STATIC_DESTRUCTOR (decl) = 1;
64c2e9b0 1525 decl_fini_priority_insert (decl, priority);
cc636d56 1526 break;
1527 default:
1528 gcc_unreachable ();
1529 }
b5530559 1530
1531 gimplify_function_tree (decl);
1532
523c1122 1533 cgraph_add_new_function (decl, false);
1534 cgraph_mark_needed_node (cgraph_node (decl));
e3a37aef 1535 set_cfun (NULL);
b5530559 1536}
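/* Illustrative use of the interface above (editorial sketch, not code from
   this file): build_cdtor collects calls into a STATEMENT_LIST and emits it
   with a priority, roughly like this, where FN stands for some FUNCTION_DECL
   and DEFAULT_INIT_PRIORITY is assumed to be the usual default priority.  */
#if 0
  tree body = NULL_TREE;
  append_to_statement_list (build_function_call_expr (fn, 0), &body);
  cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
#endif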
121f3051 1537
1538void
1539init_cgraph (void)
1540{
1541 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1542}
b5d36404 1543
a0c938f0 1544/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1545 fixed by cgraph_function_versioning (); now the call_expr in their
1546 respective tree code should be updated to call the NEW_VERSION. */
1547
1548static void
1549update_call_expr (struct cgraph_node *new_version)
1550{
1551 struct cgraph_edge *e;
1552
1553 gcc_assert (new_version);
1554 for (e = new_version->callers; e; e = e->next_caller)
1555 /* Update the call expr on the edges
1556 to call the new version. */
c2f47e15 1557 TREE_OPERAND (CALL_EXPR_FN (get_call_expr_in (e->call_stmt)), 0) = new_version->decl;
b5d36404 1558}
1559
1560
1561/* Create a new cgraph node which is the new version of
1562 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1563 edges which should be redirected to point to
1564 NEW_VERSION. All the callee edges of OLD_VERSION
1565 are cloned to the new version node. Return the new
1566 version node. */
1567
1568static struct cgraph_node *
1569cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1570 tree new_decl,
1571 VEC(cgraph_edge_p,heap) *redirect_callers)
b5d36404 1572 {
1573 struct cgraph_node *new_version;
1574 struct cgraph_edge *e, *new_e;
1575 struct cgraph_edge *next_callee;
1576 unsigned i;
1577
1578 gcc_assert (old_version);
a0c938f0 1579
b5d36404 1580 new_version = cgraph_node (new_decl);
1581
1582 new_version->analyzed = true;
1583 new_version->local = old_version->local;
1584 new_version->global = old_version->global;
1585 new_version->rtl = new_version->rtl;
1586 new_version->reachable = true;
1587 new_version->count = old_version->count;
1588
1589 /* Clone the old node callees. Recursive calls are
1590 also cloned. */
1591 for (e = old_version->callees;e; e=e->next_callee)
1592 {
4ae20857 1593 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1594 e->loop_nest, true);
b5d36404 1595 new_e->count = e->count;
1596 }
1597 /* Fix recursive calls.
1598 If OLD_VERSION has a recursive call after the
1599 previous edge cloning, the new version will have an edge
1600 pointing to the old version, which is wrong;
1601 Redirect it to point to the new version. */
1602 for (e = new_version->callees ; e; e = next_callee)
1603 {
1604 next_callee = e->next_callee;
1605 if (e->callee == old_version)
1606 cgraph_redirect_edge_callee (e, new_version);
a0c938f0 1607
b5d36404 1608 if (!next_callee)
1609 break;
1610 }
4460a647 1611 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1612 {
1613 /* Redirect calls to the old version node to point to its new
1614 version. */
1615 cgraph_redirect_edge_callee (e, new_version);
1616 }
b5d36404 1617
1618 return new_version;
1619 }
1620
1621 /* Perform function versioning.
a0c938f0 1622 Function versioning includes copying of the tree and
b5d36404 1623 a callgraph update (creating a new cgraph node and updating
1624 its callees and callers).
1625
1626 REDIRECT_CALLERS varray includes the edges to be redirected
1627 to the new version.
1628
1629 TREE_MAP is a mapping of tree nodes we want to replace with
1630 new ones (according to results of prior analysis).
1631 OLD_VERSION_NODE is the node that is versioned.
1632 It returns the new version's cgraph node. */
1633
1634struct cgraph_node *
1635cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 1636 VEC(cgraph_edge_p,heap) *redirect_callers,
b5d36404 1637 varray_type tree_map)
1638{
1639 tree old_decl = old_version_node->decl;
1640 struct cgraph_node *new_version_node = NULL;
1641 tree new_decl;
1642
1643 if (!tree_versionable_function_p (old_decl))
1644 return NULL;
1645
1646 /* Make a new FUNCTION_DECL tree node for the
1647 new version. */
1648 new_decl = copy_node (old_decl);
1649
1650 /* Create the new version's call-graph node.
1651 and update the edges of the new node. */
1652 new_version_node =
1653 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1654 redirect_callers);
1655
1656 /* Copy the OLD_VERSION_NODE function tree to the new version. */
469679ab 1657 tree_function_versioning (old_decl, new_decl, tree_map, false);
b5d36404 1658 /* Update the call_expr on the edges to call the new version node. */
1659 update_call_expr (new_version_node);
1660
a0c938f0 1661 /* Update the new version's properties.
b5d36404 1662 Make The new version visible only within this translation unit.
a0c938f0 1663 ??? We cannot use COMDAT linkage because there is no
b5d36404 1664 ABI support for this. */
1665 DECL_EXTERNAL (new_version_node->decl) = 0;
1666 DECL_ONE_ONLY (new_version_node->decl) = 0;
1667 TREE_PUBLIC (new_version_node->decl) = 0;
1668 DECL_COMDAT (new_version_node->decl) = 0;
1669 new_version_node->local.externally_visible = 0;
1670 new_version_node->local.local = 1;
1671 new_version_node->lowered = true;
1672 return new_version_node;
1673}
469679ab 1674
1675/* Produce a separate function body for inline clones so the offline copy can be
1676 modified without affecting them. */
1677struct cgraph_node *
1678save_inline_function_body (struct cgraph_node *node)
1679{
1680 struct cgraph_node *first_clone;
1681
1682 gcc_assert (node == cgraph_node (node->decl));
1683
1684 cgraph_lower_function (node);
1685
1686 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
334ec2d8 1687 output to the assembly file. This clone is pointed to by inline_decl of the
469679ab 1688 original function and the inlining infrastructure knows how to deal with it. */
1689 if (!flag_unit_at_a_time)
1690 {
1691 struct cgraph_edge *e;
1692
4ae20857 1693 first_clone = cgraph_clone_node (node, node->count, 0, CGRAPH_FREQ_BASE,
1694 false);
469679ab 1695 first_clone->needed = 0;
1696 first_clone->reachable = 1;
1697 /* Recursively clone all bodies. */
1698 for (e = first_clone->callees; e; e = e->next_callee)
1699 if (!e->inline_failed)
1700 cgraph_clone_inlined_nodes (e, true, false);
1701 }
1702 else
1703 first_clone = node->next_clone;
1704
1705 first_clone->decl = copy_node (node->decl);
1706 node->next_clone = NULL;
1707 if (!flag_unit_at_a_time)
1708 node->inline_decl = first_clone->decl;
1709 first_clone->prev_clone = NULL;
1710 cgraph_insert_node_to_hashtable (first_clone);
1711 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1712
1713 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1714 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1715
1716 DECL_EXTERNAL (first_clone->decl) = 0;
1717 DECL_ONE_ONLY (first_clone->decl) = 0;
1718 TREE_PUBLIC (first_clone->decl) = 0;
1719 DECL_COMDAT (first_clone->decl) = 0;
1720
1721 for (node = first_clone->next_clone; node; node = node->next_clone)
1722 node->decl = first_clone->decl;
1723#ifdef ENABLE_CHECKING
1724 verify_cgraph_node (first_clone);
1725#endif
1726 return first_clone;
1727}
a861fe52 1728
1729#include "gt-cgraphunit.h"