/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree based frontends and the backend (and middle end).

   The front-end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front-end has parsed the whole body of
      a function and it is certain that neither the function body nor its
      declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for static
      variables.

    - cgraph_finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      In unit-at-a-time mode the call-graph construction and local function
      analysis take place here.  Bodies of unreachable functions are released
      to conserve memory usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C frontend).

    - cgraph_optimize

      In this unit-at-a-time compilation the intra procedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The backend can then use this information to
      modify calling conventions, do better inlining or similar optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by these
      functions.  There should be little need to call them and all the
      references should be made explicit to the cgraph code.  At present these
      functions are used by the C++ frontend to explicitly mark the keyed
      methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood by
      generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by the expression as needed.

      ??? On the tree-ssa genericizing should take place here and we will
      avoid the need for these hooks (replacing them by a genericizing hook).

    - expand_function callback

      This function is used to expand a function and pass it into the RTL
      back-end.  The front-end should not make any assumptions about when
      this function can be called.  In particular
      cgraph_assemble_pending_functions, varpool_assemble_pending_variables,
      cgraph_finalize_function, varpool_finalize_function and cgraph_optimize
      can cause arbitrary previously finalized functions to be expanded.

   We implement two compilation modes.

    - unit-at-a-time:  In this mode analyzing of all functions is deferred
      to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.

      In cgraph_finalize_compilation_unit the reachable functions are
      analyzed.  During analysis the call-graph edges from reachable
      functions are constructed and their destinations are marked as
      reachable.  References to functions and variables are discovered too
      and variables found to be needed are output to the assembly file.  Via
      the mark_referenced call in assemble_variable, functions referenced by
      static variables are noticed too.

      The intra-procedural information is produced and its existence
      indicated by global_info_ready.  Once this flag is set it is impossible
      to change a function from !reachable to reachable and thus
      assemble_variable no longer calls mark_referenced.

      Finally the call-graph is topologically sorted and all reachable
      functions that have not been completely inlined or are not external
      are output.

      ??? It is possible that a reference to a function or variable is
      optimized out.  We cannot deal with this nicely because topological
      order is not suitable for it.  For tree-ssa we may consider another
      pass doing optimization and re-discovering reachable functions.

      ??? Reorganize code so variables are output very last and only if they
      really have been referenced by produced code, so we catch more cases
      where the reference has been optimized out.

    - non-unit-at-a-time

      All functions and variables are output as early as possible to conserve
      memory consumption.  This may or may not result in less memory used but
      it is still needed for some legacy code that relies on a particular
      ordering of things output from the compiler.

      Varpool data structures are not used and variables are output directly.

      Functions are output early using a call to
      cgraph_assemble_pending_functions from cgraph_finalize_function.  The
      decision on whether a function is needed is made more conservatively so
      uninlinable static functions are needed too.  During the call-graph
      construction the edge destinations are not marked as reachable and it
      is completely relied upon assemble_variable to mark them.  */
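
/* Illustrative sketch (editorial addition, not part of GCC): a front end is
   expected to drive the interface documented above roughly as follows.  The
   parser entry point frontend_parse_next_function is a hypothetical name;
   the cgraph calls are the real ones described in the comment above.

      static void
      frontend_compile_translation_unit (void)
      {
        tree fndecl;

        while ((fndecl = frontend_parse_next_function ()) != NULL_TREE)
          cgraph_finalize_function (fndecl, false);

        cgraph_finalize_compilation_unit ();
        cgraph_optimize ();
      }
*/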

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "c-common.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "output.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);

static FILE *cgraph_dump_file;

static GTY (()) tree static_ctors;
static GTY (()) tree static_dtors;

/* When the target does not have .ctors/.dtors sections, we call all
   constructors and destructors from a special initialization/destruction
   function recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (tree fndecl)
{
  if (targetm.have_ctors_dtors)
    return;

  if (DECL_STATIC_CONSTRUCTOR (fndecl))
    {
      static_ctors = tree_cons (NULL_TREE, fndecl, static_ctors);
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
      cgraph_mark_reachable_node (cgraph_node (fndecl));
    }
  if (DECL_STATIC_DESTRUCTOR (fndecl))
    {
      static_dtors = tree_cons (NULL_TREE, fndecl, static_dtors);
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
      cgraph_mark_reachable_node (cgraph_node (fndecl));
    }
}

/* Synthesize a function which calls all the global ctors or global
   dtors in this file.  This is only used for targets which do not
   support .ctors/.dtors sections.  */
static void
build_cdtor (int method_type, tree cdtors)
{
  tree body = 0;

  if (!cdtors)
    return;

  for (; cdtors; cdtors = TREE_CHAIN (cdtors))
    append_to_statement_list (build_function_call_expr (TREE_VALUE (cdtors), 0),
                              &body);

  cgraph_build_static_cdtor (method_type, body, DEFAULT_INIT_PRIORITY);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
cgraph_build_cdtor_fns (void)
{
  if (!targetm.have_ctors_dtors)
    {
      build_cdtor ('I', static_ctors);
      static_ctors = NULL_TREE;
      build_cdtor ('D', static_dtors);
      static_dtors = NULL_TREE;
    }
  else
    {
      gcc_assert (!static_ctors);
      gcc_assert (!static_dtors);
    }
}

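/* Illustrative note (editorial addition, not part of GCC): on a target
   without .ctors/.dtors support, two constructors recorded through
   record_cdtor_fn, say ctor_a and ctor_b, are conceptually wrapped by
   cgraph_build_cdtor_fns into a single synthesized function equivalent to

      void _GLOBAL__I_example (void)
      {
        ctor_a ();
        ctor_b ();
      }

   whose public, magic name (produced by get_file_function_name and shown
   here only schematically) is what collect2 later scans for and arranges to
   run before main.  The destructor list is wrapped the same way under 'D'.  */
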
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  tree origin;
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize && !node->local.disregard_inline_limits
            && !DECL_DECLARED_INLINE_P (decl)
            && !node->origin))
       && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if it's marked for inlining.  Otherwise we want to emit it now.  */

  /* "extern inline" functions are never output locally.  */
  if (DECL_EXTERNAL (decl))
    return false;
  /* Nested functions of an extern inline function shall not be emitted
     unless we inlined the origin.  */
  for (origin = decl_function_context (decl); origin;
       origin = decl_function_context (origin))
    if (DECL_EXTERNAL (origin))
      return false;
  /* We want to emit COMDAT functions only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  if (!DECL_INLINE (decl)
      || (!node->local.disregard_inline_limits
          /* When declared inline, defer even the uninlinable functions.
             This allows them to be eliminated when unused.  */
          && !DECL_DECLARED_INLINE_P (decl)
          && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
    return true;

  return false;
}

/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions to the callgraph in such a way that they look like
   ordinary reachable functions inserted into the callgraph already at
   construction time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize the function and
             move it into the reachable functions list.  */

          node->next_needed = NULL;
          node->needed = node->reachable = false;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have already been performed on the whole
             cgraph but not on this function.  */

          tree_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          node->local.inlinable = tree_inlinable_function_p (fndecl);
          node->local.self_insns = estimate_num_insns (fndecl,
                                                       &eni_inlining_weights);
          node->local.disregard_inline_limits
            = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
          /* Inlining characteristics are maintained by
             cgraph_mark_inline.  */
          node->global.insns = node->local.self_insns;
          if (flag_really_no_inline && !node->local.disregard_inline_limits)
            node->local.inlinable = 0;
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
               && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.sub);
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->output = 0;
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
    }
  return output;
}

/* When not doing unit-at-a-time, output all functions enqueued.
   Return true when such functions were found.  */

static bool
cgraph_assemble_pending_functions (void)
{
  bool output = false;

  if (flag_unit_at_a_time)
    return false;

  cgraph_output_pending_asms ();

  while (cgraph_nodes_queue)
    {
      struct cgraph_node *n = cgraph_nodes_queue;

      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      n->next_needed = NULL;
      if (!n->global.inlined_to
          && !n->alias
          && !DECL_EXTERNAL (n->decl))
        {
          cgraph_expand_function (n);
          output = true;
        }
      output |= cgraph_process_new_functions ();
    }

  return output;
}


/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two copies of the body differ is not well defined.
   We replace the old body with the new body so that in unit-at-a-time
   mode we always use the new body, while in normal mode we may end up
   with the old body inlined into some functions and the new body
   expanded and inlined in others.

   ??? It may make more sense to use one body for inlining and another
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->output is set, then this is a unit-at-a-time compilation
     and we have already begun whole-unit analysis.  This is *not*
     testing for whether we've already emitted the function.  That
     case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should
     never present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->output);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  if (!flag_unit_at_a_time)
    {
      struct cgraph_node *n, *next;

      for (n = cgraph_nodes; n; n = next)
        {
          next = n->next;
          if (n->global.inlined_to == node)
            cgraph_remove_node (n);
        }
    }

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already processed it and ignored it as not needed.  */
  if (node->reachable && !flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      for (n = cgraph_nodes_queue; n; n = n->next_needed)
        if (n == node)
          break;
      if (!n)
        node->reachable = 0;
    }
}

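/* Illustrative example (editorial addition, not part of GCC) of the
   redefinition extension handled by cgraph_reset_node above: in C code
   such as

      extern inline int twice (int x) { return x + x; }
      ...
      int twice (int x) { return 2 * x; }

   the second definition causes the already finalized node for the first
   body to be reset and re-analyzed, so in unit-at-a-time mode only the
   second body is ever used, while in non-unit-at-a-time mode earlier
   callers may already have inlined the first body.  */
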
static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;
  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->decl = decl;
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  record_cdtor_fn (node->decl);
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    cgraph_analyze_function (node);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If not unit at a time, go ahead and emit everything we've found
     to be reachable at this time.  */
  if (!nested)
    {
      if (!cgraph_assemble_pending_functions ())
        ggc_collect ();
    }

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);
}

/* Verify the cgraph node NODE and the call edges hanging off of it.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct cgraph_node *main_clone;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  block_stmt_iterator bsi;
  bool error_found = false;

  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               cgraph_node_name (e->caller), cgraph_node_name (e->callee));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
        {
          error ("caller edge count is negative");
          error_found = true;
        }
      if (e->frequency < 0)
        {
          error ("caller edge frequency is negative");
          error_found = true;
        }
      if (e->frequency > CGRAPH_FREQ_MAX)
        {
          error ("caller edge frequency is too large");
          error_found = true;
        }
      if (!e->inline_failed)
        {
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  for (main_clone = cgraph_node (node->decl); main_clone;
       main_clone = main_clone->next_clone)
    if (main_clone == node)
      break;
  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->analyzed
      && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
              {
                tree stmt = bsi_stmt (bsi);
                tree call = get_call_expr_in (stmt);
                tree decl;
                if (call && (decl = get_callee_fndecl (call)))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            debug_generic_stmt (stmt);
                            error_found = true;
                          }
                        if (e->callee->decl != cgraph_node (decl)->decl
                            && e->inline_failed)
                          {
                            error ("edge points to wrong declaration:");
                            debug_tree (e->callee->decl);
                            fprintf (stderr," Instead of:");
                            debug_tree (decl);
                          }
                        e->aux = (void *)1;
                      }
                    else
                      {
                        error ("missing callgraph edge for call stmt:");
                        debug_generic_stmt (stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     cgraph_node_name (e->caller),
                     cgraph_node_name (e->callee));
              debug_generic_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify the whole cgraph structure.  */
void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (sorrycount || errorcount)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (errorcount || sorrycount)
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  cgraph_lower_function (node);
  node->analyzed = true;

  if (!flag_unit_at_a_time)
    {
      bitmap_obstack_initialize (NULL);
      tree_register_cfg_hooks ();
      execute_pass_list (pass_early_local_passes.sub);
      free_dominance_info (CDI_POST_DOMINATORS);
      free_dominance_info (CDI_DOMINATORS);
      bitmap_obstack_release (NULL);
    }

  pop_cfun ();
  current_function_decl = NULL;
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
        {
          mark_decl_referenced (decl);
          if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (node->decl))
            warning (OPT_Wattributes,
                     "%J%<externally_visible%> attribute has effect only on public objects",
                     node->decl);
          else
            {
              if (node->local.finalized)
                cgraph_mark_needed_node (node);
              node->local.externally_visible = true;
            }
        }
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
        {
          mark_decl_referenced (decl);
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning (OPT_Wattributes,
                     "%J%<externally_visible%> attribute has effect only on public objects",
                     vnode->decl);
          else
            {
              if (vnode->finalized)
                varpool_mark_needed_node (vnode);
              vnode->externally_visible = true;
            }
        }
    }
}

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Just the new part of the queue is
   re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed && DECL_SAVED_TREE (node->decl))
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points on the way (by template instantiation
     and virtual method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later
         use the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_SAVED_TREE (decl))
        {
          cgraph_reset_node (node);
          continue;
        }

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (DECL_SAVED_TREE (decl));

      cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);

      /* We finalize local static variables while constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed && DECL_SAVED_TREE (node->decl))
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !DECL_SAVED_TREE (decl))
        cgraph_reset_node (node);

      if (!node->reachable && DECL_SAVED_TREE (decl))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}

/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  if (errorcount || sorrycount)
    return;

  finish_aliases_1 ();

  if (!flag_unit_at_a_time)
    {
      cgraph_output_pending_asms ();
      cgraph_assemble_pending_functions ();
      varpool_output_debug_info ();
      return;
    }

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_analyze_functions ();
  timevar_pop (TV_CGRAPH);
}

/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->output);

      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (DECL_SAVED_TREE (decl)
          && !node->global.inlined_to
          && (node->needed
              || (e && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        node->output = 1;
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
                      || DECL_EXTERNAL (decl));
        }
    }
}

/* Expand the function specified by NODE.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  enum debug_info_type save_write_symbols = NO_DEBUG;
  const struct gcc_debug_hooks *save_debug_hooks = NULL;
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  if (flag_unit_at_a_time)
    announce_function (decl);

  gcc_assert (node->lowered);

  if (DECL_IGNORED_P (decl))
    {
      save_write_symbols = write_symbols;
      write_symbols = NO_DEBUG;
      save_debug_hooks = debug_hooks;
      debug_hooks = &do_nothing_debug_hooks;
    }

  /* Generate RTL for the body of DECL.  */
  lang_hooks.callgraph.expand_function (decl);

  /* Make sure that the back end didn't give up on compiling.  */
  /* ??? Can happen with nested functions of extern inline.  */
  gcc_assert (TREE_ASM_WRITTEN (node->decl));

  if (DECL_IGNORED_P (decl))
    {
      write_symbols = save_write_symbols;
      debug_hooks = save_debug_hooks;
    }

  current_function_decl = NULL;
  if (!cgraph_preserve_function_body_p (node->decl))
    {
      cgraph_release_function_body (node);
      /* Eliminate all call edges.  This is important so the CALL_EXPR no
         longer points to the dead function body.  */
      cgraph_node_remove_callees (node);
    }

  cgraph_function_flags_ready = true;
}

/* Return true when the call on edge E will be inlined into its caller;
   *REASON receives E->inline_failed, which is NULL exactly when the call
   will be inlined.  */

bool
cgraph_inline_p (struct cgraph_edge *e, const char **reason)
{
  *reason = e->inline_failed;
  return !e->inline_failed;
}

6674a6ce 1107
db0e878d
AJ
1108/* Expand all functions that must be output.
1109
b58b1157
JH
1110 Attempt to topologically sort the nodes so function is output when
1111 all called functions are already assembled to allow data to be
a98ebe2e 1112 propagated across the callgraph. Use a stack to get smaller distance
d1a6adeb 1113 between a function and its callees (later we may choose to use a more
b58b1157
JH
1114 sophisticated algorithm for function reordering; we will likely want
1115 to use subsections to make the output functions appear in top-down
1116 order). */
1117
1118static void
a20af5b8 1119cgraph_expand_all_functions (void)
b58b1157
JH
1120{
1121 struct cgraph_node *node;
5ed6ace5 1122 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
18c6ada9 1123 int order_pos = 0, new_order_pos = 0;
b58b1157
JH
1124 int i;
1125
b58b1157 1126 order_pos = cgraph_postorder (order);
341c100f 1127 gcc_assert (order_pos == cgraph_n_nodes);
b58b1157 1128
1ae58c30 1129 /* Garbage collector may remove inline clones we eliminate during
18c6ada9
JH
1130 optimization. So we must be sure to not reference them. */
1131 for (i = 0; i < order_pos; i++)
1132 if (order[i]->output)
1133 order[new_order_pos++] = order[i];
1134
1135 for (i = new_order_pos - 1; i >= 0; i--)
b58b1157
JH
1136 {
1137 node = order[i];
1138 if (node->output)
1139 {
341c100f 1140 gcc_assert (node->reachable);
b58b1157
JH
1141 node->output = 0;
1142 cgraph_expand_function (node);
1143 }
1144 }
f45e0ad1 1145 cgraph_process_new_functions ();
50674e96 1146
b58b1157 1147 free (order);
50674e96 1148
b58b1157
JH
1149}
1150
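/* Illustrative note (editorial addition, not part of GCC): for a callgraph
   where main calls helper and helper calls leaf, the sorted walk above aims
   to expand leaf first and main last, so that (cycles aside) a callee is
   already assembled by the time each of its callers is expanded, which is
   what lets per-function data flow from callees to callers.  */
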
/* This is used to sort the node types by the cgraph order number.  */

struct cgraph_order_sort
{
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};

/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->output)
        {
          i = pf->order;
          gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
          nodes[i].kind = ORDER_FUNCTION;
          nodes[i].u.f = pf;
        }
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
        {
        case ORDER_FUNCTION:
          nodes[i].u.f->output = 0;
          cgraph_expand_function (nodes[i].u.f);
          break;

        case ORDER_VAR:
          varpool_assemble_decl (nodes[i].u.v);
          break;

        case ORDER_ASM:
          assemble_asm (nodes[i].u.a->asm_str);
          break;

        case ORDER_UNDEFINED:
          break;

        default:
          gcc_unreachable ();
        }
    }

  cgraph_asm_nodes = NULL;
}

1242
18c6ada9
JH
1243/* Return true when function body of DECL still needs to be kept around
1244 for later re-use. */
1245bool
1246cgraph_preserve_function_body_p (tree decl)
1247{
1248 struct cgraph_node *node;
18c6ada9 1249 if (!cgraph_global_info_ready)
ba19ceae
JH
1250 return (flag_really_no_inline
1251 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1252 : DECL_INLINE (decl));
18c6ada9
JH
1253 /* Look if there is any clone around. */
1254 for (node = cgraph_node (decl); node; node = node->next_clone)
1255 if (node->global.inlined_to)
1256 return true;
1257 return false;
1258}
1259
ef330312
PB
1260static void
1261ipa_passes (void)
1262{
1263 cfun = NULL;
04b201a2 1264 current_function_decl = NULL;
ef330312
PB
1265 tree_register_cfg_hooks ();
1266 bitmap_obstack_initialize (NULL);
1267 execute_ipa_pass_list (all_ipa_passes);
1268 bitmap_obstack_release (NULL);
1269}
1270
1c4a429a
JH
1271/* Perform simple optimizations based on callgraph. */
1272
1273void
db0e878d 1274cgraph_optimize (void)
1c4a429a 1275{
413803d3
VR
1276 if (errorcount || sorrycount)
1277 return;
1278
18c6ada9
JH
1279#ifdef ENABLE_CHECKING
1280 verify_cgraph ();
1281#endif
7be82279
JH
1282
1283 /* Call functions declared with the "constructor" or "destructor"
1284 attribute. */
1285 cgraph_build_cdtor_fns ();
4a46cbfb 1286 if (!flag_unit_at_a_time)
cd9c7bd2 1287 {
f45e0ad1
JH
1288 cgraph_assemble_pending_functions ();
1289 cgraph_process_new_functions ();
1290 cgraph_state = CGRAPH_STATE_FINISHED;
474eccc6 1291 cgraph_output_pending_asms ();
8a4a83ed
JH
1292 varpool_assemble_pending_decls ();
1293 varpool_output_debug_info ();
cd9c7bd2
JH
1294 return;
1295 }
857e7259 1296
cd9c7bd2
JH
1297 /* Frontend may output common variables after the unit has been finalized.
1298 It is safe to deal with them here as they are always zero initialized. */
8a4a83ed 1299 varpool_analyze_pending_decls ();
151e6f24 1300 cgraph_analyze_functions ();
857e7259 1301
a194aa56 1302 timevar_push (TV_CGRAPHOPT);
a5573239
JH
1303 if (pre_ipa_mem_report)
1304 {
1305 fprintf (stderr, "Memory consumption before IPA\n");
1306 dump_memory_report (false);
1307 }
b58b1157 1308 if (!quiet_flag)
a418679d 1309 fprintf (stderr, "Performing interprocedural optimizations\n");
f45e0ad1 1310 cgraph_state = CGRAPH_STATE_IPA;
7e2fe9d8
AP
1311
1312 /* Don't run the IPA passes if there was any error or sorry messages. */
1313 if (errorcount == 0 && sorrycount == 0)
1314 ipa_passes ();
1315
6b02a499
JH
1316 /* This pass remove bodies of extern inline functions we never inlined.
1317 Do this later so other IPA passes see what is really going on. */
1318 cgraph_remove_unreachable_nodes (false, dump_file);
dafc5b82 1319 cgraph_global_info_ready = true;
a194aa56
JH
1320 if (cgraph_dump_file)
1321 {
7d82fe7c 1322 fprintf (cgraph_dump_file, "Optimized ");
a194aa56 1323 dump_cgraph (cgraph_dump_file);
cd9c7bd2 1324 dump_varpool (cgraph_dump_file);
a194aa56 1325 }
a5573239
JH
1326 if (post_ipa_mem_report)
1327 {
7fa982e5 1328 fprintf (stderr, "Memory consumption after IPA\n");
a5573239
JH
1329 dump_memory_report (false);
1330 }
a194aa56 1331 timevar_pop (TV_CGRAPHOPT);
1c4a429a 1332
b58b1157 1333 /* Output everything. */
7d82fe7c
KC
1334 if (!quiet_flag)
1335 fprintf (stderr, "Assembling functions:\n");
18c6ada9
JH
1336#ifdef ENABLE_CHECKING
1337 verify_cgraph ();
1338#endif
474eccc6 1339
6674a6ce 1340 cgraph_mark_functions_to_output ();
cd9c7bd2 1341
f45e0ad1 1342 cgraph_state = CGRAPH_STATE_EXPANSION;
474eccc6
ILT
1343 if (!flag_toplevel_reorder)
1344 cgraph_output_in_order ();
1345 else
1346 {
1347 cgraph_output_pending_asms ();
1348
1349 cgraph_expand_all_functions ();
8a4a83ed 1350 varpool_remove_unreferenced_decls ();
474eccc6 1351
8a4a83ed
JH
1352 varpool_assemble_pending_decls ();
1353 varpool_output_debug_info ();
474eccc6 1354 }
f45e0ad1
JH
1355 cgraph_process_new_functions ();
1356 cgraph_state = CGRAPH_STATE_FINISHED;
cd9c7bd2 1357
a194aa56
JH
1358 if (cgraph_dump_file)
1359 {
7d82fe7c 1360 fprintf (cgraph_dump_file, "\nFinal ");
a194aa56
JH
1361 dump_cgraph (cgraph_dump_file);
1362 }
18c6ada9
JH
1363#ifdef ENABLE_CHECKING
1364 verify_cgraph ();
6de9cd9a
DN
1365 /* Double check that all inline clones are gone and that all
1366 function bodies have been released from memory. */
1367 if (flag_unit_at_a_time
6de9cd9a
DN
1368 && !(sorrycount || errorcount))
1369 {
1370 struct cgraph_node *node;
1371 bool error_found = false;
1372
1373 for (node = cgraph_nodes; node; node = node->next)
1374 if (node->analyzed
1375 && (node->global.inlined_to
c22cacf3 1376 || DECL_SAVED_TREE (node->decl)))
6de9cd9a
DN
1377 {
1378 error_found = true;
1379 dump_cgraph_node (stderr, node);
c22cacf3 1380 }
6de9cd9a 1381 if (error_found)
ab532386 1382 internal_error ("nodes with no released memory found");
6de9cd9a 1383 }
18c6ada9 1384#endif
1c4a429a 1385}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' or 'D'.  BODY should be a STATEMENT_LIST containing
   GENERIC statements.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  sprintf (which_buf, "%c_%d", which, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (FUNCTION_DECL, name,
                     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_IGNORED_P (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
}
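
/* Illustrative sketch (editorial addition, not part of GCC): a caller that
   wants some_registration_fn to run before main can build a GENERIC
   statement list and hand it to cgraph_build_static_cdtor, mirroring what
   build_cdtor above does for the collected constructor list.  The decl
   some_registration_fn is a hypothetical FUNCTION_DECL.

      tree body = NULL_TREE;
      append_to_statement_list (build_function_call_expr (some_registration_fn,
                                                          NULL_TREE),
                                &body);
      cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
*/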

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);
  for (e = new_version->callers; e; e = e->next_caller)
    /* Update the call expr on the edges
       to call the new version.  */
    TREE_OPERAND (CALL_EXPR_FN (get_call_expr_in (e->call_stmt)), 0) = new_version->decl;
}

/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e, *new_e;
  struct cgraph_edge *next_callee;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->global = old_version->global;
  new_version->rtl = new_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone the old node's callees.  Recursive calls are
     also cloned.  */
  for (e = old_version->callees; e; e = e->next_callee)
    {
      new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
                                 e->loop_nest, true);
      new_e->count = e->count;
    }
  /* Fix recursive calls.
     If OLD_VERSION has a recursive call after the
     previous edge cloning, the new version will have an edge
     pointing to the old version, which is wrong;
     redirect it to point to the new version.  */
  for (e = new_version->callees ; e; e = next_callee)
    {
      next_callee = e->next_callee;
      if (e->callee == old_version)
        cgraph_redirect_edge_callee (e, new_version);

      if (!next_callee)
        break;
    }
  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS holds the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   The new version's cgraph node is returned.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            varray_type tree_map)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  new_decl = copy_node (old_decl);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false);
  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_version_node->decl) = 0;
  DECL_ONE_ONLY (new_version_node->decl) = 0;
  TREE_PUBLIC (new_version_node->decl) = 0;
  DECL_COMDAT (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  return new_version_node;
}
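
/* Illustrative sketch (editorial addition, not part of GCC): an IPA pass
   that wants a private clone of NODE with every existing caller redirected
   to the clone, and no tree replacements, could call the function above
   roughly as follows.  NODE is a hypothetical, already analyzed cgraph node.

      VEC (cgraph_edge_p, heap) *redirect_callers = NULL;
      struct cgraph_edge *e;
      struct cgraph_node *clone;

      for (e = node->callers; e; e = e->next_caller)
        VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);

      clone = cgraph_function_versioning (node, redirect_callers, NULL);
      VEC_free (cgraph_edge_p, heap, redirect_callers);
*/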

/* Produce a separate function body for inline clones so the offline copy
   can be modified without affecting them.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* In non-unit-at-a-time mode we construct a full fledged clone that we
     never output to the assembly file.  This clone is pointed to by
     inline_decl of the original function and the inlining infrastructure
     knows how to deal with this.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;

      first_clone = cgraph_clone_node (node, node->count, 0, CGRAPH_FREQ_BASE,
                                       false);
      first_clone->needed = 0;
      first_clone->reachable = 1;
      /* Recursively clone all bodies.  */
      for (e = first_clone->callees; e; e = e->next_callee)
        if (!e->inline_failed)
          cgraph_clone_inlined_nodes (e, true, false);
    }
  else
    first_clone = node->next_clone;

  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  if (!flag_unit_at_a_time)
    node->inline_decl = first_clone->decl;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}

#include "gt-cgraphunit.h"