/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree-based front ends and the back end (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for static
      variables.

    - cgraph_finalize_compilation_unit

      This function is called once the (source level) compilation unit is
      finalized and it will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve
      memory usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C front end).

    - cgraph_optimize

      In unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The back end can then use this information
      to modify calling conventions, do better inlining or similar
      optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by this function.
      There should be little need to call this function and all the references
      should be made explicit to the cgraph code.  At present these functions
      are used by the C++ front end to explicitly mark the keyed methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood by
      generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by the expression as needed.

      ??? On the tree-ssa side genericizing should take place here and we
      would avoid the need for these hooks (replacing them by a genericizing
      hook).

   Analysis of all functions is deferred to cgraph_finalize_compilation_unit
   and expansion to cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are
   analyzed.  During analysis the call-graph edges from reachable
   functions are constructed and their destinations are marked as
   reachable.  References to functions and variables are discovered too
   and variables found to be needed are output to the assembly file.  Via
   the mark_referenced call in assemble_variable, functions referenced by
   static variables are noticed too.

   The intraprocedural information is produced and its existence
   indicated by global_info_ready.  Once this flag is set it is impossible
   to change a function from !reachable to reachable and thus
   assemble_variable no longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable functions
   that have not been completely inlined or are not external are output.

   ??? It is possible that a reference to a function or variable is optimized
   out.  We cannot deal with this nicely because the topological order is not
   suitable for it.  For tree-ssa we may consider another pass doing
   optimization and re-discovering reachable functions.

   ??? Reorganize code so variables are output very last and only if they
   really have been referenced by the produced code, so we catch more cases
   where a reference has been optimized out.  */

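/* A minimal sketch of the call sequence described above, for illustration
   only (it is not itself part of the driver):

     for each function body the front end has finished parsing:
       cgraph_finalize_function (fndecl, false);
     for each finalized static variable:
       the varpool counterpart described above;
     once the whole (source level) compilation unit is parsed:
       cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit analyzes the unit and then invokes
   cgraph_optimize itself, which drives IPA optimization and expansion.  */
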
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
static void cgraph_analyze_function (struct cgraph_node *);

static FILE *cgraph_dump_file;

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static GTY (()) VEC(tree, gc) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static GTY (()) VEC(tree, gc) *static_dtors;

/* When the target does not have ctors and dtors, we call all constructors
   and destructors from a special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (tree fndecl)
{
  struct cgraph_node *node;
  if (targetm.have_ctors_dtors
      || (!DECL_STATIC_CONSTRUCTOR (fndecl)
          && !DECL_STATIC_DESTRUCTOR (fndecl)))
    return;

  if (DECL_STATIC_CONSTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_ctors, fndecl);
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
    }
  if (DECL_STATIC_DESTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_dtors, fndecl);
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
    }
  node = cgraph_node (fndecl);
  node->local.disregard_inline_limits = 1;
  cgraph_mark_reachable_node (node);
}

/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      do
        {
          priority_type p;
          fn = cdtors[i];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (!body)
            priority = p;
          else if (p != priority)
            break;
          append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
                                                              fn, 0),
                                    &body);
          ++i;
        }
      while (i < len);
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return (const tree *)p1 - (const tree *)p2;
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return (const tree *)p1 - (const tree *)p2;
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
cgraph_build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_ctors),
             VEC_length (tree, static_ctors),
             sizeof (tree),
             compare_ctor);
      build_cdtor (/*ctor_p=*/true,
                   VEC_address (tree, static_ctors),
                   VEC_length (tree, static_ctors));
      VEC_truncate (tree, static_ctors, 0);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_dtors),
             VEC_length (tree, static_dtors),
             sizeof (tree),
             compare_dtor);
      build_cdtor (/*ctor_p=*/false,
                   VEC_address (tree, static_dtors),
                   VEC_length (tree, static_dtors));
      VEC_truncate (tree, static_dtors, 0);
    }
}

/* Determine if function DECL is needed.  That is, visible to something
   outside this translation unit or something magic in the system
   configury.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize && !node->local.disregard_inline_limits
            && !DECL_DECLARED_INLINE_P (decl)
            && !node->origin))
       && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}

/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into the callgraph in a way so they look like ordinary reachable
   functions inserted into the callgraph already at construction time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize the function and
             move it into the reachable functions list.  */

          node->next_needed = NULL;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have already been performed on the whole
             cgraph but not on this function.  */

          gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          compute_inline_parameters (node);
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
               && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.pass.sub);
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->process = 0;
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with the new body so that in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already processed it and ignored it as not needed or got
     a re-declaration in IMA mode.  */
  if (node->reachable)
    {
      struct cgraph_node *n;

      for (n = cgraph_nodes_queue; n; n = n->next_needed)
        if (n == node)
          break;
      if (!n)
        node->reachable = 0;
    }
}

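/* Lower the body of NODE by running the tree lowering passes, unless it has
   already been lowered.  Nested functions are lowered first so the body can
   then be processed on its own.  */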
static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;

  if (node->nested)
    lower_nested_functions (node->decl);
  gcc_assert (!node->nested);

  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;
  record_cdtor_fn (node->decl);

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}

/* The C99 extern inline keywords allow changing the declaration after the
   function has been finalized.  We need to re-decide then whether we want to
   mark the function as needed.  */

void
cgraph_mark_if_needed (tree decl)
{
  struct cgraph_node *node = cgraph_node (decl);
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
}

/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  while (node != node2 && node2)
    node2 = node2->clone_of;
  return node2 != NULL;
}

/* Verify consistency of the given cgraph NODE.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               identifier_to_locale (cgraph_node_name (e->caller)),
               identifier_to_locale (cgraph_node_name (e->callee)));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
        {
          error ("caller edge count is negative");
          error_found = true;
        }
      if (e->frequency < 0)
        {
          error ("caller edge frequency is negative");
          error_found = true;
        }
      if (e->frequency > CGRAPH_FREQ_MAX)
        {
          error ("caller edge frequency is too large");
          error_found = true;
        }
      if (!e->inline_failed)
        {
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
        if (n == node)
          break;
      if (!n)
        {
          error ("node has wrong clone_of");
          error_found = true;
        }
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
        if (n->clone_of != node)
          break;
      if (n)
        {
          error ("node has wrong clone list");
          error_found = true;
        }
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }

  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
              {
                gimple stmt = gsi_stmt (gsi);
                tree decl;
                if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            debug_gimple_stmt (stmt);
                            error_found = true;
                          }
                        if (!clone_of_p (cgraph_node (decl), e->callee)
                            && !e->callee->global.inlined_to)
                          {
                            error ("edge points to wrong declaration:");
                            debug_tree (e->callee->decl);
                            fprintf (stderr," Instead of:");
                            debug_tree (decl);
                          }
                        e->aux = (void *)1;
                      }
                    else
                      {
                        error ("missing callgraph edge for call stmt:");
                        debug_gimple_stmt (stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux && !e->indirect_call)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)),
                     identifier_to_locale (cgraph_node_name (e->callee)));
              debug_gimple_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify whole cgraph structure.  */
void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (sorrycount || errorcount)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (errorcount || sorrycount)
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
        {
          mark_decl_referenced (decl);
          if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (node->decl))
            warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute have effect only on public objects");
          else
            {
              if (node->local.finalized)
                cgraph_mark_needed_node (node);
              node->local.externally_visible = true;
            }
        }
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
        {
          mark_decl_referenced (decl);
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute have effect only on public objects");
          else
            {
              if (vnode->finalized)
                varpool_mark_needed_node (vnode);
              vnode->externally_visible = true;
            }
        }
    }
}

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Just the new part of the queue is
   re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later
         use the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
        {
          cgraph_reset_node (node);
          continue;
        }

      if (!node->analyzed)
        cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);

      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body.  The DECL_INITIAL() of that
         abstract function declaration will be later needed to output debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
        {
          struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
          origin_node->abstract_and_needed = true;
        }

      /* We finalize local static variables while constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
        cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}

/* Emit thunks for every node in the cgraph.
   FIXME: We really ought to emit thunks only for functions that are needed.  */

static void
cgraph_emit_thunks (void)
{
  struct cgraph_node *n;

  for (n = cgraph_nodes; n; n = n->next)
    {
      /* Only emit thunks on functions defined in this TU.
         Note that this may emit more thunks than strictly necessary.
         During optimization some nodes may disappear.  It would be
         nice to emit thunks only for the functions that will be
         emitted, but we cannot know that until the inliner and other
         IPA passes have run (see the sequencing of the call to
         cgraph_mark_functions_to_output in cgraph_optimize).  */
      if (n->reachable
          && !DECL_EXTERNAL (n->decl))
        lang_hooks.callgraph.emit_associated_thunks (n->decl);
    }
}

/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors; we would
     miss diagnostics for the following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Emit thunks for reachable nodes, if needed.  */
  if (lang_hooks.callgraph.emit_associated_thunks)
    cgraph_emit_thunks ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}


/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process);

      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (node->analyzed
          && !node->global.inlined_to
          && (node->needed
              || (e && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        node->process = 1;
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to
                      || !gimple_has_body_p (decl)
                      || DECL_EXTERNAL (decl));

        }

    }
}

/* Expand function specified by NODE.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}

/* Return true when CALLER_DECL should be inlined into CALLEE_DECL.  */

bool
cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
{
  *reason = e->inline_failed;
  return !e->inline_failed;
}


/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so a function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->process)
        {
          gcc_assert (node->reachable);
          node->process = 0;
          cgraph_expand_function (node);
        }
    }
  cgraph_process_new_functions ();

  free (order);

}

/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,
  ORDER_FUNCTION,
  ORDER_VAR,
  ORDER_ASM
};

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};

/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
        {
          i = pf->order;
          gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
          nodes[i].kind = ORDER_FUNCTION;
          nodes[i].u.f = pf;
        }
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
          varpool_mark_needed_node (nodes[i].u.v);
        }
    }
  varpool_empty_needed_queue ();

  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
        {
        case ORDER_FUNCTION:
          nodes[i].u.f->process = 0;
          cgraph_expand_function (nodes[i].u.f);
          break;

        case ORDER_VAR:
          varpool_assemble_decl (nodes[i].u.v);
          break;

        case ORDER_ASM:
          assemble_asm (nodes[i].u.a->asm_str);
          break;

        case ORDER_UNDEFINED:
          break;

        default:
          gcc_unreachable ();
        }
    }

  cgraph_asm_nodes = NULL;
}

/* Return true when the function body of DECL still needs to be kept around
   for later re-use.  */
bool
cgraph_preserve_function_body_p (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (cgraph_global_info_ready);
  /* Look if there is any clone around.  */
  node = cgraph_node (decl);
  if (node->clones)
    return true;
  return false;
}

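/* Run the interprocedural (IPA) pass queues over the whole call graph:
   the small IPA passes first, then summary generation for the regular IPA
   and LTO-generation passes (writing LTO summaries when we are not reading
   them back), and finally the regular IPA passes themselves.  */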
static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();
    }

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  execute_ipa_pass_list (all_regular_ipa_passes);

  bitmap_obstack_release (NULL);
}


/* Perform simple optimizations based on callgraph.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* The front end may output common variables after the unit has been
     finalized.  It is safe to deal with them here as they are always zero
     initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there were any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    return;

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
        if (node->analyzed
            && (node->global.inlined_to
                || gimple_has_body_p (node->decl)))
          {
            error_found = true;
            dump_cgraph_node (stderr, node);
          }
      if (error_found)
        internal_error ("nodes with unreleased memory found");
    }
#endif
}


/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}

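/* Open the cgraph dump file used throughout this module, if callgraph
   dumping was requested.  */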
void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}


/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e, *new_e;
  struct cgraph_edge *next_callee;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->global = old_version->global;
  new_version->rtl = new_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone the old node callees.  Recursive calls are
     also cloned.  */
  for (e = old_version->callees; e; e = e->next_callee)
    {
      new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
                                 e->lto_stmt_uid, 0, e->frequency,
                                 e->loop_nest, true);
      new_e->count = e->count;
    }
  /* Fix recursive calls.
     If OLD_VERSION has a recursive call after the
     previous edge cloning, the new version will have an edge
     pointing to the old version, which is wrong;
     Redirect it to point to the new version.  */
  for (e = new_version->callees ; e; e = next_callee)
    {
      next_callee = e->next_callee;
      if (e->callee == old_version)
        cgraph_redirect_edge_callee (e, new_version);

      if (!next_callee)
        break;
    }
  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}

1646 /* Perform function versioning.
c22cacf3 1647 Function versioning includes copying of the tree and
57fb5341
RL
1648 a callgraph update (creating a new cgraph node and updating
1649 its callees and callers).
1650
1651 REDIRECT_CALLERS varray includes the edges to be redirected
1652 to the new version.
1653
1654 TREE_MAP is a mapping of tree nodes we want to replace with
1655 new ones (according to results of prior analysis).
1656 OLD_VERSION_NODE is the node that is versioned.
c6f7cfc1
JH
1657 It returns the new version's cgraph node.
1658 ARGS_TO_SKIP lists arguments to be omitted from functions
1659 */
57fb5341
RL
1660
struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_version_node->decl) = 0;
  DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
  TREE_PUBLIC (new_version_node->decl) = 0;
  DECL_COMDAT (new_version_node->decl) = 0;
  DECL_WEAK (new_version_node->decl) = 0;
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
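
/* Illustrative sketch only (not part of the original source): one way an
   IPA pass might drive cgraph_function_versioning.  The helper name
   example_version_without_first_arg and the decision to skip argument 0
   are hypothetical; the calls themselves are the ones used above.  A NULL
   return means the function was not versionable.

   static void
   example_version_without_first_arg (struct cgraph_node *node)
   {
     VEC(cgraph_edge_p,heap) *redirect_callers = NULL;
     struct cgraph_edge *e;
     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     struct cgraph_node *new_node;

     for (e = node->callers; e; e = e->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);
     bitmap_set_bit (args_to_skip, 0);

     new_node = cgraph_function_versioning (node, redirect_callers,
                                            NULL, args_to_skip);

     VEC_free (cgraph_edge_p, heap, redirect_callers);
     BITMAP_FREE (args_to_skip);
   }  */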

/* Produce a separate function body for inline clones so the offline copy
   can be modified without affecting them.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
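  /* FIRST_CLONE will carry the saved body.  Re-parent the remaining
     sibling clones of NODE so that they become clones of FIRST_CLONE.  */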
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

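  /* Walk the clone tree rooted at FIRST_CLONE and make every clone in it
     refer to the saved body's declaration.  */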
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the function body of NODE to FIRST_CLONE.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply);
  DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}

/* Given a virtual clone, turn it into an actual clone.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Copy the function body of the clone's origin to the clone.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.
   We might also do this on demand if we don't want to bring all functions
   to memory prior to compilation, but the current WHOPR implementation does
   bring them in, and it is a bit easier to keep everything right in this
   order.  */
void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We could also process nodes in topological order, but the number of
     iterations should be bounded by the number of IPA passes, since a
     single IPA pass is unlikely to create clones of clones it created
     itself.  */
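  /* For example (illustrative only, names made up): if ipa-cp creates a
     clone FOO.CLONE0 of FOO, and a later IPA pass creates FOO.CLONE1 as a
     clone of FOO.CLONE0, then FOO.CLONE1 can be materialized only after
     FOO.CLONE0 has a body, so the loop below needs a second iteration.  */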
  while (!stabilized)
    {
      stabilized = true;
      for (node = cgraph_nodes; node; node = node->next)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               cgraph_node_name (node->clone_of),
                               cgraph_node_name (node));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, " replace map: ");
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
                                                      node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = VEC_index (ipa_replace_map_p,
                                                        node->clone.tree_map,
                                                        i);
                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " args_to_skip: ");
                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (cgraph_dump_file, " combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                }
              else
                stabilized = false;
            }
        }
    }
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Updating call sites\n");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && gimple_has_body_p (node->decl)
        && (!node->clone_of || node->clone_of->decl != node->decl))
      {
        struct cgraph_edge *e;

        current_function_decl = node->decl;
        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
        for (e = node->callees; e; e = e->next_callee)
          {
            tree decl = gimple_call_fndecl (e->call_stmt);
            /* When a function gets inlined, indirect inlining might have
               invented a new edge for an originally indirect stmt.  Since we
               do not preserve clones in their original form, we must not
               update the statement here; other inline clones do not need to
               contain a call to the same callee.  The inliner will do the
               substitution for us later.  */
            if (decl && decl != e->callee->decl)
              {
                gimple new_stmt;
                gimple_stmt_iterator gsi;

                if (cgraph_dump_file)
                  {
                    fprintf (cgraph_dump_file, "updating call of %s in %s:",
                             cgraph_node_name (e->callee),
                             cgraph_node_name (node));
                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
                  }

                if (e->callee->clone.combined_args_to_skip)
                  new_stmt = gimple_call_copy_skip_args (e->call_stmt,
                                                         e->callee->clone.combined_args_to_skip);
                else
                  new_stmt = e->call_stmt;
                if (gimple_vdef (new_stmt)
                    && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                gimple_call_set_fndecl (new_stmt, e->callee->decl);

                gsi = gsi_for_stmt (e->call_stmt);
                gsi_replace (&gsi, new_stmt, true);

                /* Update EH information too, just in case.  */
                maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);

                cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);

                if (cgraph_dump_file)
                  {
                    fprintf (cgraph_dump_file, " updated to:");
                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
                  }
              }
          }
        pop_cfun ();
        current_function_decl = NULL;
#ifdef ENABLE_CHECKING
        verify_cgraph_node (node);
#endif
      }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphunit.h"