cd6bca02 1/* Callgraph based interprocedural optimizations.
028a99ef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
 25 The main scope of this file is to act as an interface between the
 26 tree based frontends and the backend (and middle end).
27
 28 The front-end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front-end has parsed the whole body of the function
 33 and it is certain that neither the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
 36 functions.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
6329636b 48 At this point the call-graph construction and local function
b0cdf642 49 analysis take place. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
 53 compilation units are combined (such as in the C frontend).
b0cdf642 54
55 - cgraph_optimize
56
 57 In this unit-at-a-time compilation the intraprocedural analysis takes
 58 place. In particular, static functions whose address is never
 59 taken are marked as local. The backend can then use this information to
 60 modify calling conventions, do better inlining, or perform similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
 75 callgraph and varpool nodes referenced by them as needed.
76
 77 ??? On tree-ssa, genericizing should take place here and we would avoid
 78 the need for these hooks (replacing them by a genericizing hook).
79
6329636b 80 The analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
 87 and variables found to be needed are output to the assembly file. Via
 88 the mark_referenced call in assemble_variable, functions referenced by
 89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
 93 to change a function from !reachable to reachable and thus
 94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined and are not external are output.
98
 99 ??? It is possible that a reference to a function or variable is optimized
 100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
 105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
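
/* Illustrative sketch (not part of the original source): roughly how a
   front end is expected to drive the interface described above.  The
   helpers first_parsed_function () and next_parsed_function () are
   hypothetical iterators standing in for however a front end walks its
   parsed functions; cgraph_finalize_function and
   cgraph_finalize_compilation_unit are the entry points defined in this
   file.

     void
     frontend_finish_unit (void)
     {
       tree fndecl;

       // Hand every fully parsed function over to the callgraph code.
       for (fndecl = first_parsed_function (); fndecl;
            fndecl = next_parsed_function ())
         cgraph_finalize_function (fndecl, false);

       // Then finalize the unit; this analyzes the reachable functions
       // and eventually calls cgraph_optimize to expand them.
       cgraph_finalize_compilation_unit ();
     }
   */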
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
bfec3452 146static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 147
121f3051 148static FILE *cgraph_dump_file;
149
2c56f72e 150/* A vector of FUNCTION_DECLs declared as static constructors. */
151static GTY (()) VEC(tree, gc) *static_ctors;
152/* A vector of FUNCTION_DECLs declared as static destructors. */
153static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 154
28454517 155/* Used for vtable lookup in thunk adjusting. */
156static GTY (()) tree vtable_entry_type;
157
a861fe52 158/* When the target does not have ctors and dtors, we call all constructors
310d2511 159 and destructors from a special initialization/destruction function
48e1416a 160 recognized by collect2.
161
a861fe52 162 When we are going to build this function, collect all constructors and
163 destructors and turn them into normal functions. */
164
165static void
166record_cdtor_fn (tree fndecl)
167{
2de29097 168 struct cgraph_node *node;
169 if (targetm.have_ctors_dtors
170 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
171 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 172 return;
173
174 if (DECL_STATIC_CONSTRUCTOR (fndecl))
175 {
2c56f72e 176 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 177 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 178 }
179 if (DECL_STATIC_DESTRUCTOR (fndecl))
180 {
2c56f72e 181 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 182 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 183 }
2de29097 184 node = cgraph_node (fndecl);
185 node->local.disregard_inline_limits = 1;
186 cgraph_mark_reachable_node (node);
a861fe52 187}
188
2c56f72e 189/* Define global constructor/destructor functions for the CDTORS, of
 190 which there are LEN. The CDTORS are sorted by initialization
191 priority. If CTOR_P is true, these are constructors; otherwise,
192 they are destructors. */
193
a861fe52 194static void
2c56f72e 195build_cdtor (bool ctor_p, tree *cdtors, size_t len)
a861fe52 196{
2c56f72e 197 size_t i;
a861fe52 198
2c56f72e 199 i = 0;
200 while (i < len)
201 {
202 tree body;
203 tree fn;
204 priority_type priority;
205
206 priority = 0;
207 body = NULL_TREE;
208 /* Find the next batch of constructors/destructors with the same
209 initialization priority. */
210 do
211 {
212 priority_type p;
213 fn = cdtors[i];
214 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
215 if (!body)
216 priority = p;
217 else if (p != priority)
218 break;
389dd41b 219 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
220 fn, 0),
2c56f72e 221 &body);
222 ++i;
223 }
224 while (i < len);
225 gcc_assert (body != NULL_TREE);
 226 /* Generate a function to call all the functions of like
227 priority. */
228 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
229 }
230}
231
232/* Comparison function for qsort. P1 and P2 are actually of type
233 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
234 used to determine the sort order. */
a861fe52 235
2c56f72e 236static int
237compare_ctor (const void *p1, const void *p2)
238{
239 tree f1;
240 tree f2;
241 int priority1;
242 int priority2;
243
244 f1 = *(const tree *)p1;
245 f2 = *(const tree *)p2;
246 priority1 = DECL_INIT_PRIORITY (f1);
247 priority2 = DECL_INIT_PRIORITY (f2);
48e1416a 248
2c56f72e 249 if (priority1 < priority2)
250 return -1;
251 else if (priority1 > priority2)
252 return 1;
253 else
254 /* Ensure a stable sort. */
255 return (const tree *)p1 - (const tree *)p2;
256}
257
258/* Comparison function for qsort. P1 and P2 are actually of type
259 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
260 used to determine the sort order. */
a861fe52 261
2c56f72e 262static int
263compare_dtor (const void *p1, const void *p2)
264{
265 tree f1;
266 tree f2;
267 int priority1;
268 int priority2;
269
270 f1 = *(const tree *)p1;
271 f2 = *(const tree *)p2;
272 priority1 = DECL_FINI_PRIORITY (f1);
273 priority2 = DECL_FINI_PRIORITY (f2);
48e1416a 274
2c56f72e 275 if (priority1 < priority2)
276 return -1;
277 else if (priority1 > priority2)
278 return 1;
279 else
280 /* Ensure a stable sort. */
281 return (const tree *)p1 - (const tree *)p2;
a861fe52 282}
283
284/* Generate functions to call static constructors and destructors
285 for targets that do not support .ctors/.dtors sections. These
286 functions have magic names which are detected by collect2. */
287
288static void
289cgraph_build_cdtor_fns (void)
290{
2c56f72e 291 if (!VEC_empty (tree, static_ctors))
a861fe52 292 {
2c56f72e 293 gcc_assert (!targetm.have_ctors_dtors);
294 qsort (VEC_address (tree, static_ctors),
48e1416a 295 VEC_length (tree, static_ctors),
2c56f72e 296 sizeof (tree),
297 compare_ctor);
298 build_cdtor (/*ctor_p=*/true,
299 VEC_address (tree, static_ctors),
48e1416a 300 VEC_length (tree, static_ctors));
2c56f72e 301 VEC_truncate (tree, static_ctors, 0);
a861fe52 302 }
2c56f72e 303
304 if (!VEC_empty (tree, static_dtors))
a861fe52 305 {
2c56f72e 306 gcc_assert (!targetm.have_ctors_dtors);
307 qsort (VEC_address (tree, static_dtors),
48e1416a 308 VEC_length (tree, static_dtors),
2c56f72e 309 sizeof (tree),
310 compare_dtor);
311 build_cdtor (/*ctor_p=*/false,
312 VEC_address (tree, static_dtors),
48e1416a 313 VEC_length (tree, static_dtors));
2c56f72e 314 VEC_truncate (tree, static_dtors, 0);
a861fe52 315 }
316}
317
2c0b522d 318/* Determine if function DECL is needed. That is, visible either to
 319 something outside this translation unit or to something magic in the system
6329636b 320 configury. */
2c0b522d 321
7bfefa9d 322bool
323cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 324{
3f82b628 325 /* If the user told us it is used, then it must be so. */
05806473 326 if (node->local.externally_visible)
327 return true;
328
3f82b628 329 /* ??? If the assembler name is set by hand, it is possible to assemble
330 the name later after finalizing the function and the fact is noticed
331 in assemble_name then. This is arguably a bug. */
332 if (DECL_ASSEMBLER_NAME_SET_P (decl)
333 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
334 return true;
335
55680bef 336 /* With -fkeep-inline-functions we are keeping all inline functions except
337 for extern inline ones. */
338 if (flag_keep_inline_functions
339 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 340 && !DECL_EXTERNAL (decl)
341 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 342 return true;
343
2c0b522d 344 /* If we decided it was needed before, but at the time we didn't have
345 the body of the function available, then it's still needed. We have
346 to go back and re-check its dependencies now. */
347 if (node->needed)
348 return true;
349
350 /* Externally visible functions must be output. The exception is
a0c938f0 351 COMDAT functions that must be output only when they are needed.
8baa9d15 352
 353 When not optimizing, also output the static functions (see
95da6220 354 PR24561), but don't do so for always_inline functions, functions
d3d410e1 355 declared inline, and nested functions. These were optimized out
356 in the original implementation and it is unclear whether we want
554f2707 357 to change the behavior here. */
bba7ddf8 358 if (((TREE_PUBLIC (decl)
d3d410e1 359 || (!optimize && !node->local.disregard_inline_limits
360 && !DECL_DECLARED_INLINE_P (decl)
361 && !node->origin))
59dd4830 362 && !flag_whole_program
363 && !flag_lto
364 && !flag_whopr)
62eec3b4 365 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 366 return true;
367
368 /* Constructors and destructors are reachable from the runtime by
369 some mechanism. */
370 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
371 return true;
372
2c0b522d 373 return false;
374}
375
bdc40eb8 376/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
523c1122 377 functions into the callgraph in such a way that they look like ordinary reachable
 378 functions inserted into the callgraph already at construction time. */
379
380bool
381cgraph_process_new_functions (void)
382{
383 bool output = false;
384 tree fndecl;
385 struct cgraph_node *node;
386
0cddb138 387 varpool_analyze_pending_decls ();
523c1122 388 /* Note that this queue may grow as it is being processed, as the new
389 functions may generate new ones. */
390 while (cgraph_new_nodes)
391 {
392 node = cgraph_new_nodes;
393 fndecl = node->decl;
394 cgraph_new_nodes = cgraph_new_nodes->next_needed;
395 switch (cgraph_state)
396 {
397 case CGRAPH_STATE_CONSTRUCTION:
398 /* At construction time we just need to finalize function and move
399 it into reachable functions list. */
400
401 node->next_needed = NULL;
402 cgraph_finalize_function (fndecl, false);
403 cgraph_mark_reachable_node (node);
404 output = true;
405 break;
406
407 case CGRAPH_STATE_IPA:
f517b36e 408 case CGRAPH_STATE_IPA_SSA:
523c1122 409 /* When IPA optimization has already started, do all essential
 410 transformations that have already been performed on the whole
 411 cgraph but not on this function. */
412
75a70cf9 413 gimple_register_cfg_hooks ();
523c1122 414 if (!node->analyzed)
415 cgraph_analyze_function (node);
416 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
417 current_function_decl = fndecl;
9c1bff7a 418 compute_inline_parameters (node);
f517b36e 419 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
420 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
421 /* When not optimizing, be sure we run early local passes anyway
422 to expand OMP. */
423 || !optimize)
20099e35 424 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 425 free_dominance_info (CDI_POST_DOMINATORS);
426 free_dominance_info (CDI_DOMINATORS);
427 pop_cfun ();
428 current_function_decl = NULL;
429 break;
430
431 case CGRAPH_STATE_EXPANSION:
432 /* Functions created during expansion shall be compiled
433 directly. */
09fc9532 434 node->process = 0;
523c1122 435 cgraph_expand_function (node);
436 break;
437
438 default:
439 gcc_unreachable ();
440 break;
441 }
50828ed8 442 cgraph_call_function_insertion_hooks (node);
0cddb138 443 varpool_analyze_pending_decls ();
523c1122 444 }
445 return output;
446}
447
9b8fb23a 448/* As a GCC extension we allow redefinition of the function. The
 449 semantics when the two bodies differ are not well defined.
 450 We replace the old body with the new body, so in unit-at-a-time mode
 451 we always use the new body, while in normal mode we may end up with
 452 the old body inlined into some functions and the new body expanded and
453 inlined in others.
454
 455 ??? It may make more sense to use one body for inlining and the other
 456 body for expanding the function, but this is difficult to do. */
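
/* For illustration only (hypothetical user code, not from the original
   source): the kind of redefinition handled here looks like

     extern inline int f (void) { return 1; }   // first body
     int f (void) { return 2; }                 // redefinition, new body

   In unit-at-a-time mode only the second body ends up being used; in
   normal mode, calls already inlined may still use the first body, as
   described above.  */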
457
458static void
459cgraph_reset_node (struct cgraph_node *node)
460{
09fc9532 461 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 462 This is *not* testing for whether we've already emitted the function.
463 That case can be sort-of legitimately seen with real function redefinition
464 errors. I would argue that the front end should never present us with
465 such a case, but don't enforce that for now. */
09fc9532 466 gcc_assert (!node->process);
9b8fb23a 467
468 /* Reset our data structures so we can analyze the function again. */
469 memset (&node->local, 0, sizeof (node->local));
470 memset (&node->global, 0, sizeof (node->global));
471 memset (&node->rtl, 0, sizeof (node->rtl));
472 node->analyzed = false;
473 node->local.redefined_extern_inline = true;
474 node->local.finalized = false;
475
9b8fb23a 476 cgraph_node_remove_callees (node);
477
478 /* We may need to re-queue the node for assembling in case
46beef9a 479 we already processed it and ignored it as not needed, or got
480 a re-declaration in IMA mode. */
481 if (node->reachable)
9b8fb23a 482 {
483 struct cgraph_node *n;
484
485 for (n = cgraph_nodes_queue; n; n = n->next_needed)
486 if (n == node)
487 break;
488 if (!n)
489 node->reachable = 0;
490 }
491}
c08871a9 492
1e8e9920 493static void
494cgraph_lower_function (struct cgraph_node *node)
495{
496 if (node->lowered)
497 return;
bfec3452 498
499 if (node->nested)
500 lower_nested_functions (node->decl);
501 gcc_assert (!node->nested);
502
1e8e9920 503 tree_lowering_passes (node->decl);
504 node->lowered = true;
505}
506
28df663b 507/* DECL has been parsed. Take it, queue it, compile it at the whim of the
508 logic in effect. If NESTED is true, then our caller cannot stand to have
509 the garbage collector run at the moment. We would need to either create
510 a new GC context, or just not compile right now. */
ae01b312 511
512void
28df663b 513cgraph_finalize_function (tree decl, bool nested)
ae01b312 514{
515 struct cgraph_node *node = cgraph_node (decl);
516
c08871a9 517 if (node->local.finalized)
9b8fb23a 518 cgraph_reset_node (node);
28df663b 519
167b550b 520 node->pid = cgraph_max_pid ++;
c08871a9 521 notice_global_symbol (decl);
79bb87b4 522 node->local.finalized = true;
e27482aa 523 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 524 node->finalized_by_frontend = true;
a861fe52 525 record_cdtor_fn (node->decl);
ae01b312 526
7bfefa9d 527 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 528 cgraph_mark_needed_node (node);
529
ecda6e51 530 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 531 level unit, we need to be conservative about possible entry points
532 there. */
62eec3b4 533 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 534 cgraph_mark_reachable_node (node);
535
2c0b522d 536 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 537 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 538 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 539
b69eb0ff 540 /* Possibly warn about unused parameters. */
541 if (warn_unused_parameter)
542 do_warn_unused_parameter (decl);
6329636b 543
544 if (!nested)
545 ggc_collect ();
ae01b312 546}
547
0da03d11 548/* The C99 extern inline keyword allows changing the declaration after the function
549 has been finalized. We need to re-decide if we want to mark the function as
550 needed then. */
551
552void
553cgraph_mark_if_needed (tree decl)
554{
555 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 556 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 557 cgraph_mark_needed_node (node);
558}
559
f2d608d8 560#ifdef ENABLE_CHECKING
ccf4ab6b 561/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
562static bool
563clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
564{
565 while (node != node2 && node2)
566 node2 = node2->clone_of;
567 return node2 != NULL;
568}
f2d608d8 569#endif
ccf4ab6b 570
b0cdf642 571/* Verify the consistency of the given cgraph node. */
572void
573verify_cgraph_node (struct cgraph_node *node)
574{
575 struct cgraph_edge *e;
e27482aa 576 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
117ef3d7 577 struct function *saved_cfun = cfun;
e27482aa 578 basic_block this_block;
75a70cf9 579 gimple_stmt_iterator gsi;
9bfec7c2 580 bool error_found = false;
b0cdf642 581
852f689e 582 if (seen_error ())
bd09cd3e 583 return;
584
b0cdf642 585 timevar_push (TV_CGRAPH_VERIFY);
117ef3d7 586 /* debug_generic_stmt needs a correct cfun. */
587 set_cfun (this_cfun);
b0cdf642 588 for (e = node->callees; e; e = e->next_callee)
589 if (e->aux)
590 {
0a81f5a0 591 error ("aux field set for edge %s->%s",
abd3e6b5 592 identifier_to_locale (cgraph_node_name (e->caller)),
593 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 594 error_found = true;
595 }
a2cb9b3b 596 if (node->count < 0)
597 {
598 error ("Execution count is negative");
599 error_found = true;
600 }
59dd4830 601 if (node->global.inlined_to && node->local.externally_visible)
602 {
603 error ("Externally visible inline clone");
604 error_found = true;
605 }
606 if (node->global.inlined_to && node->address_taken)
607 {
608 error ("Inline clone with address taken");
609 error_found = true;
610 }
611 if (node->global.inlined_to && node->needed)
612 {
613 error ("Inline clone is needed");
614 error_found = true;
615 }
799c8711 616 for (e = node->indirect_calls; e; e = e->next_callee)
617 {
618 if (e->aux)
619 {
620 error ("aux field set for indirect edge from %s",
621 identifier_to_locale (cgraph_node_name (e->caller)));
622 error_found = true;
623 }
624 if (!e->indirect_unknown_callee
625 || !e->indirect_info)
626 {
627 error ("An indirect edge from %s is not marked as indirect or has "
628 "associated indirect_info, the corresponding statement is: ",
629 identifier_to_locale (cgraph_node_name (e->caller)));
630 debug_gimple_stmt (e->call_stmt);
631 error_found = true;
632 }
633 }
b0cdf642 634 for (e = node->callers; e; e = e->next_caller)
635 {
a2cb9b3b 636 if (e->count < 0)
637 {
638 error ("caller edge count is negative");
639 error_found = true;
640 }
4ae20857 641 if (e->frequency < 0)
642 {
643 error ("caller edge frequency is negative");
644 error_found = true;
645 }
646 if (e->frequency > CGRAPH_FREQ_MAX)
647 {
648 error ("caller edge frequency is too large");
649 error_found = true;
650 }
1c094d2f 651 if (gimple_has_body_p (e->caller->decl)
652 && !e->caller->global.inlined_to
653 && (e->frequency
654 != compute_call_stmt_bb_frequency (e->caller->decl,
655 gimple_bb (e->call_stmt))))
656 {
657 error ("caller edge frequency %i does not match BB freqency %i",
658 e->frequency,
659 compute_call_stmt_bb_frequency (e->caller->decl,
660 gimple_bb (e->call_stmt)));
661 error_found = true;
662 }
b0cdf642 663 if (!e->inline_failed)
664 {
665 if (node->global.inlined_to
666 != (e->caller->global.inlined_to
667 ? e->caller->global.inlined_to : e->caller))
668 {
0a81f5a0 669 error ("inlined_to pointer is wrong");
b0cdf642 670 error_found = true;
671 }
672 if (node->callers->next_caller)
673 {
0a81f5a0 674 error ("multiple inline callers");
b0cdf642 675 error_found = true;
676 }
677 }
678 else
679 if (node->global.inlined_to)
680 {
0a81f5a0 681 error ("inlined_to pointer set for noninline callers");
b0cdf642 682 error_found = true;
683 }
684 }
685 if (!node->callers && node->global.inlined_to)
686 {
5cd75817 687 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 688 error_found = true;
689 }
690 if (node->global.inlined_to == node)
691 {
0a81f5a0 692 error ("inlined_to pointer refers to itself");
b0cdf642 693 error_found = true;
694 }
695
0f6439b9 696 if (!cgraph_node (node->decl))
b0cdf642 697 {
0f6439b9 698 error ("node not found in cgraph_hash");
b0cdf642 699 error_found = true;
700 }
a0c938f0 701
ccf4ab6b 702 if (node->clone_of)
703 {
704 struct cgraph_node *n;
705 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
706 if (n == node)
707 break;
708 if (!n)
709 {
710 error ("node has wrong clone_of");
711 error_found = true;
712 }
713 }
714 if (node->clones)
715 {
716 struct cgraph_node *n;
717 for (n = node->clones; n; n = n->next_sibling_clone)
718 if (n->clone_of != node)
719 break;
720 if (n)
721 {
722 error ("node has wrong clone list");
723 error_found = true;
724 }
725 }
726 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
727 {
728 error ("node is in clone list but it is not clone");
729 error_found = true;
730 }
731 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
732 {
733 error ("node has wrong prev_clone pointer");
734 error_found = true;
735 }
736 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
737 {
738 error ("double linked list of clones corrupted");
739 error_found = true;
740 }
c524ac5d 741 if (node->same_comdat_group)
742 {
743 struct cgraph_node *n = node->same_comdat_group;
744
745 if (!DECL_ONE_ONLY (node->decl))
746 {
747 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
748 error_found = true;
749 }
750 if (n == node)
751 {
752 error ("node is alone in a comdat group");
753 error_found = true;
754 }
755 do
756 {
757 if (!n->same_comdat_group)
758 {
759 error ("same_comdat_group is not a circular list");
760 error_found = true;
761 break;
762 }
763 n = n->same_comdat_group;
764 }
765 while (n != node);
766 }
ccf4ab6b 767
768 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 769 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 770 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
771 && !flag_wpa)
b0cdf642 772 {
e27482aa 773 if (this_cfun->cfg)
774 {
775 /* The nodes we're interested in are never shared, so walk
776 the tree ignoring duplicates. */
e7c352d1 777 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 778 /* Reach the trees by walking over the CFG, and note the
779 enclosing basic-blocks in the call edges. */
780 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 781 for (gsi = gsi_start_bb (this_block);
782 !gsi_end_p (gsi);
783 gsi_next (&gsi))
9bfec7c2 784 {
75a70cf9 785 gimple stmt = gsi_stmt (gsi);
799c8711 786 if (is_gimple_call (stmt))
9bfec7c2 787 {
788 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 789 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 790 if (e)
791 {
792 if (e->aux)
793 {
0a81f5a0 794 error ("shared call_stmt:");
75a70cf9 795 debug_gimple_stmt (stmt);
9bfec7c2 796 error_found = true;
797 }
799c8711 798 if (!e->indirect_unknown_callee)
28454517 799 {
799c8711 800 if (e->callee->same_body_alias)
801 {
802 error ("edge points to same body alias:");
803 debug_tree (e->callee->decl);
804 error_found = true;
805 }
e748b31d 806#ifdef ENABLE_CHECKING
807 else if (!e->callee->global.inlined_to
799c8711 808 && decl
e748b31d 809 && cgraph_get_node (decl)
810 && (e->callee->former_clone_of
811 != cgraph_get_node (decl)->decl)
799c8711 812 && !clone_of_p (cgraph_node (decl),
813 e->callee))
814 {
815 error ("edge points to wrong declaration:");
816 debug_tree (e->callee->decl);
817 fprintf (stderr," Instead of:");
818 debug_tree (decl);
819 error_found = true;
820 }
e748b31d 821#endif
28454517 822 }
799c8711 823 else if (decl)
9bfec7c2 824 {
799c8711 825 error ("an indirect edge with unknown callee "
826 "corresponding to a call_stmt with "
827 "a known declaration:");
ee3f5fc0 828 error_found = true;
799c8711 829 debug_gimple_stmt (e->call_stmt);
9bfec7c2 830 }
831 e->aux = (void *)1;
832 }
799c8711 833 else if (decl)
9bfec7c2 834 {
0a81f5a0 835 error ("missing callgraph edge for call stmt:");
75a70cf9 836 debug_gimple_stmt (stmt);
9bfec7c2 837 error_found = true;
838 }
839 }
840 }
e27482aa 841 pointer_set_destroy (visited_nodes);
e27482aa 842 }
843 else
844 /* No CFG available?! */
845 gcc_unreachable ();
846
b0cdf642 847 for (e = node->callees; e; e = e->next_callee)
848 {
799c8711 849 if (!e->aux)
b0cdf642 850 {
0a81f5a0 851 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 852 identifier_to_locale (cgraph_node_name (e->caller)),
853 identifier_to_locale (cgraph_node_name (e->callee)));
75a70cf9 854 debug_gimple_stmt (e->call_stmt);
b0cdf642 855 error_found = true;
856 }
857 e->aux = 0;
858 }
799c8711 859 for (e = node->indirect_calls; e; e = e->next_callee)
860 {
861 if (!e->aux)
862 {
863 error ("an indirect edge from %s has no corresponding call_stmt",
864 identifier_to_locale (cgraph_node_name (e->caller)));
865 debug_gimple_stmt (e->call_stmt);
866 error_found = true;
867 }
868 e->aux = 0;
869 }
b0cdf642 870 }
871 if (error_found)
872 {
873 dump_cgraph_node (stderr, node);
0a81f5a0 874 internal_error ("verify_cgraph_node failed");
b0cdf642 875 }
117ef3d7 876 set_cfun (saved_cfun);
b0cdf642 877 timevar_pop (TV_CGRAPH_VERIFY);
878}
879
880/* Verify whole cgraph structure. */
881void
882verify_cgraph (void)
883{
884 struct cgraph_node *node;
885
852f689e 886 if (seen_error ())
8ec2a798 887 return;
888
b0cdf642 889 for (node = cgraph_nodes; node; node = node->next)
890 verify_cgraph_node (node);
891}
892
56af936e 893/* Output all asm statements we have stored up to be output. */
894
895static void
896cgraph_output_pending_asms (void)
897{
898 struct cgraph_asm_node *can;
899
852f689e 900 if (seen_error ())
56af936e 901 return;
902
903 for (can = cgraph_asm_nodes; can; can = can->next)
904 assemble_asm (can->asm_str);
905 cgraph_asm_nodes = NULL;
906}
907
0785e435 908/* Analyze the function scheduled to be output. */
bfec3452 909static void
0785e435 910cgraph_analyze_function (struct cgraph_node *node)
911{
bfec3452 912 tree save = current_function_decl;
0785e435 913 tree decl = node->decl;
914
ec1e35b2 915 current_function_decl = decl;
e27482aa 916 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 917
6816d0c4 918 assign_assembler_name_if_neeeded (node->decl);
919
bfec3452 920 /* Make sure to gimplify bodies only once. During analyzing a
921 function we lower it, which will require gimplified nested
922 functions, so we can end up here with an already gimplified
923 body. */
924 if (!gimple_body (decl))
925 gimplify_function_tree (decl);
926 dump_function (TDI_generic, decl);
927
e27482aa 928 cgraph_lower_function (node);
6e8d6e86 929 node->analyzed = true;
0785e435 930
e27482aa 931 pop_cfun ();
bfec3452 932 current_function_decl = save;
0785e435 933}
934
05806473 935/* Look for externally_visible and used attributes and mark cgraph nodes
936 accordingly.
937
938 We cannot mark the nodes at the point the attributes are processed (in
939 handle_*_attribute) because the copy of the declarations available at that
940 point may not be canonical. For example, in:
941
942 void f();
943 void f() __attribute__((used));
944
945 the declaration we see in handle_used_attribute will be the second
946 declaration -- but the front end will subsequently merge that declaration
947 with the original declaration and discard the second declaration.
948
949 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
950
951 void f() {}
952 void f() __attribute__((externally_visible));
953
954 is valid.
955
956 So, we walk the nodes at the end of the translation unit, applying the
957 attributes at that point. */
958
959static void
960process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 961 struct varpool_node *first_var)
05806473 962{
963 struct cgraph_node *node;
1d416bd7 964 struct varpool_node *vnode;
05806473 965
966 for (node = cgraph_nodes; node != first; node = node->next)
967 {
968 tree decl = node->decl;
83a23b05 969 if (DECL_PRESERVE_P (decl))
0b49f8f8 970 cgraph_mark_needed_node (node);
05806473 971 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
972 {
ba12ea31 973 if (! TREE_PUBLIC (node->decl))
712d2297 974 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
975 "%<externally_visible%>"
976 " attribute have effect only on public objects");
59dd4830 977 else if (node->local.finalized)
978 cgraph_mark_needed_node (node);
05806473 979 }
980 }
1d416bd7 981 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 982 {
983 tree decl = vnode->decl;
83a23b05 984 if (DECL_PRESERVE_P (decl))
05806473 985 {
22671757 986 vnode->force_output = true;
05806473 987 if (vnode->finalized)
1d416bd7 988 varpool_mark_needed_node (vnode);
05806473 989 }
990 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
991 {
ba12ea31 992 if (! TREE_PUBLIC (vnode->decl))
712d2297 993 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
994 "%<externally_visible%>"
995 " attribute have effect only on public objects");
59dd4830 996 else if (vnode->finalized)
997 varpool_mark_needed_node (vnode);
05806473 998 }
999 }
1000}
1001
aeeb194b 1002/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
 1003 each reachable function) and build the cgraph.
1004 The function can be called multiple times after inserting new nodes
0d424440 1005 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 1006
aeeb194b 1007static void
1008cgraph_analyze_functions (void)
ae01b312 1009{
c1dcd13c 1010 /* Keep track of already processed nodes when called multiple times for
06b27565 1011 intermodule optimization. */
c1dcd13c 1012 static struct cgraph_node *first_analyzed;
c17d0de1 1013 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1014 static struct varpool_node *first_analyzed_var;
aeeb194b 1015 struct cgraph_node *node, *next;
ae01b312 1016
c17d0de1 1017 process_function_and_variable_attributes (first_processed,
1018 first_analyzed_var);
1019 first_processed = cgraph_nodes;
1d416bd7 1020 first_analyzed_var = varpool_nodes;
1021 varpool_analyze_pending_decls ();
f79b6507 1022 if (cgraph_dump_file)
ae01b312 1023 {
e4200070 1024 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1025 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1026 if (node->needed)
f79b6507 1027 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1028 fprintf (cgraph_dump_file, "\n");
ae01b312 1029 }
aeeb194b 1030 cgraph_process_new_functions ();
ae01b312 1031
e6d2b2d8 1032 /* Propagate reachability flag and lower representation of all reachable
1033 functions. In the future, lowering will introduce new functions and
1034 new entry points on the way (by template instantiation and virtual
1035 method table generation for instance). */
3d7bfc56 1036 while (cgraph_nodes_queue)
ae01b312 1037 {
0785e435 1038 struct cgraph_edge *edge;
3d7bfc56 1039 tree decl = cgraph_nodes_queue->decl;
1040
1041 node = cgraph_nodes_queue;
d87976fb 1042 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1043 node->next_needed = NULL;
ae01b312 1044
638531ad 1045 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 1046 weak alias attribute to kill its body. See
638531ad 1047 gcc.c-torture/compile/20011119-1.c */
75a70cf9 1048 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 1049 {
1050 cgraph_reset_node (node);
1051 continue;
1052 }
638531ad 1053
7bfefa9d 1054 if (!node->analyzed)
1055 cgraph_analyze_function (node);
2c0b522d 1056
ae01b312 1057 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1058 if (!edge->callee->reachable)
2c0b522d 1059 cgraph_mark_reachable_node (edge->callee);
1060
61c2c7b1 1061 if (node->same_comdat_group)
1062 {
1063 for (next = node->same_comdat_group;
1064 next != node;
1065 next = next->same_comdat_group)
1066 cgraph_mark_reachable_node (next);
1067 }
1068
d544ceff 1069 /* If decl is a clone of an abstract function, mark that abstract
1070 function so that we don't release its body. The DECL_INITIAL() of that
 1071 abstract function declaration will later be needed to output debug info. */
1072 if (DECL_ABSTRACT_ORIGIN (decl))
1073 {
1074 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1075 origin_node->abstract_and_needed = true;
1076 }
1077
c17d0de1 1078 /* We finalize local static variables while constructing callgraph
1079 edges. Process their attributes too. */
1080 process_function_and_variable_attributes (first_processed,
1081 first_analyzed_var);
1082 first_processed = cgraph_nodes;
1d416bd7 1083 first_analyzed_var = varpool_nodes;
1084 varpool_analyze_pending_decls ();
aeeb194b 1085 cgraph_process_new_functions ();
ae01b312 1086 }
2c0b522d 1087
aa5e06c7 1088 /* Collect entry points to the unit. */
f79b6507 1089 if (cgraph_dump_file)
3d7bfc56 1090 {
e4200070 1091 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1092 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1093 if (node->needed)
f79b6507 1094 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1095 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1096 dump_cgraph (cgraph_dump_file);
3d7bfc56 1097 }
e6d2b2d8 1098
f79b6507 1099 if (cgraph_dump_file)
1100 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1101
f4ec5ce1 1102 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1103 {
1104 tree decl = node->decl;
f4ec5ce1 1105 next = node->next;
ae01b312 1106
1a1a827a 1107 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 1108 cgraph_reset_node (node);
9b8fb23a 1109
1a1a827a 1110 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 1111 {
f79b6507 1112 if (cgraph_dump_file)
1113 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1114 cgraph_remove_node (node);
9b8fb23a 1115 continue;
ae01b312 1116 }
bc5cab3b 1117 else
1118 node->next_needed = NULL;
1a1a827a 1119 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 1120 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1121 }
f79b6507 1122 if (cgraph_dump_file)
e4200070 1123 {
1124 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1125 dump_cgraph (cgraph_dump_file);
1126 }
c1dcd13c 1127 first_analyzed = cgraph_nodes;
ae01b312 1128 ggc_collect ();
aeeb194b 1129}
1130
8f69fd82 1131
aeeb194b 1132/* Analyze the whole compilation unit once it is parsed completely. */
1133
1134void
1135cgraph_finalize_compilation_unit (void)
1136{
9929334e 1137 timevar_push (TV_CGRAPH);
1138
bfec3452 1139 /* Do not skip analyzing the functions if there were errors; we
 1140 would miss diagnostics for the following functions otherwise. */
aeeb194b 1141
8f69fd82 1142 /* Emit size functions we didn't inline. */
4189e677 1143 finalize_size_functions ();
8f69fd82 1144
8f69fd82 1145 /* Call functions declared with the "constructor" or "destructor"
1146 attribute. */
1147 cgraph_build_cdtor_fns ();
aeeb194b 1148
9929334e 1149 /* Mark alias targets necessary and emit diagnostics. */
1150 finish_aliases_1 ();
1151
aeeb194b 1152 if (!quiet_flag)
1153 {
1154 fprintf (stderr, "\nAnalyzing compilation unit\n");
1155 fflush (stderr);
1156 }
1157
9929334e 1158 /* Gimplify and lower all functions, compute reachability and
1159 remove unreachable nodes. */
1160 cgraph_analyze_functions ();
1161
8f69fd82 1162 /* Mark alias targets necessary and emit diagnostics. */
1163 finish_aliases_1 ();
1164
9929334e 1165 /* Gimplify and lower thunks. */
aeeb194b 1166 cgraph_analyze_functions ();
bfec3452 1167
9929334e 1168 /* Finally drive the pass manager. */
bfec3452 1169 cgraph_optimize ();
9929334e 1170
1171 timevar_pop (TV_CGRAPH);
ae01b312 1172}
9ed5b1f5 1173
1174
ae01b312 1175/* Figure out what functions we want to assemble. */
1176
1177static void
d9d9733a 1178cgraph_mark_functions_to_output (void)
ae01b312 1179{
1180 struct cgraph_node *node;
61c2c7b1 1181#ifdef ENABLE_CHECKING
1182 bool check_same_comdat_groups = false;
1183
1184 for (node = cgraph_nodes; node; node = node->next)
1185 gcc_assert (!node->process);
1186#endif
ae01b312 1187
ae01b312 1188 for (node = cgraph_nodes; node; node = node->next)
1189 {
1190 tree decl = node->decl;
d7c6d889 1191 struct cgraph_edge *e;
a0c938f0 1192
61c2c7b1 1193 gcc_assert (!node->process || node->same_comdat_group);
1194 if (node->process)
1195 continue;
d7c6d889 1196
1197 for (e = node->callers; e; e = e->next_caller)
611e5405 1198 if (e->inline_failed)
d7c6d889 1199 break;
ae01b312 1200
e6d2b2d8 1201 /* We need to output all local functions that are used and not
1202 always inlined, as well as those that are reachable from
1203 outside the current compilation unit. */
1a1a827a 1204 if (node->analyzed
b0cdf642 1205 && !node->global.inlined_to
08843223 1206 && (node->needed || node->reachable_from_other_partition
cdedc740 1207 || node->address_taken
d7c6d889 1208 || (e && node->reachable))
4ee9c684 1209 && !TREE_ASM_WRITTEN (decl)
ae01b312 1210 && !DECL_EXTERNAL (decl))
61c2c7b1 1211 {
1212 node->process = 1;
1213 if (node->same_comdat_group)
1214 {
1215 struct cgraph_node *next;
1216 for (next = node->same_comdat_group;
1217 next != node;
1218 next = next->same_comdat_group)
1219 next->process = 1;
1220 }
1221 }
1222 else if (node->same_comdat_group)
1223 {
1224#ifdef ENABLE_CHECKING
1225 check_same_comdat_groups = true;
1226#endif
1227 }
cc636d56 1228 else
9cee7c3f 1229 {
1230 /* We should've reclaimed all functions that are not needed. */
1231#ifdef ENABLE_CHECKING
75a70cf9 1232 if (!node->global.inlined_to
1a1a827a 1233 && gimple_has_body_p (decl)
08843223 1234 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but inline copies
 1235 are inside the partition, we can end up not removing the body since we no longer
 1236 have an analyzed node pointing to it. */
1237 && !node->in_other_partition
9cee7c3f 1238 && !DECL_EXTERNAL (decl))
1239 {
1240 dump_cgraph_node (stderr, node);
1241 internal_error ("failed to reclaim unneeded function");
1242 }
1243#endif
75a70cf9 1244 gcc_assert (node->global.inlined_to
1a1a827a 1245 || !gimple_has_body_p (decl)
08843223 1246 || node->in_other_partition
9cee7c3f 1247 || DECL_EXTERNAL (decl));
1248
1249 }
a0c938f0 1250
961e3b13 1251 }
61c2c7b1 1252#ifdef ENABLE_CHECKING
1253 if (check_same_comdat_groups)
1254 for (node = cgraph_nodes; node; node = node->next)
1255 if (node->same_comdat_group && !node->process)
1256 {
1257 tree decl = node->decl;
1258 if (!node->global.inlined_to
1259 && gimple_has_body_p (decl)
08843223 1260 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but inline copies
 1261 are inside the partition, we can end up not removing the body since we no longer
 1262 have an analyzed node pointing to it. */
1263 && !node->in_other_partition
61c2c7b1 1264 && !DECL_EXTERNAL (decl))
1265 {
1266 dump_cgraph_node (stderr, node);
1267 internal_error ("failed to reclaim unneeded function");
1268 }
1269 }
1270#endif
961e3b13 1271}
1272
28454517 1273/* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
 1274 in lowered GIMPLE form.
 1275
 1276 Set current_function_decl and cfun to the newly constructed empty function body.
 1277 Return the basic block in the function body. */
1278
1279static basic_block
1280init_lowered_empty_function (tree decl)
1281{
1282 basic_block bb;
1283
1284 current_function_decl = decl;
1285 allocate_struct_function (decl, false);
1286 gimple_register_cfg_hooks ();
1287 init_empty_tree_cfg ();
1288 init_tree_ssa (cfun);
1289 init_ssa_operands ();
1290 cfun->gimple_df->in_ssa_p = true;
1291 DECL_INITIAL (decl) = make_node (BLOCK);
1292
1293 DECL_SAVED_TREE (decl) = error_mark_node;
1294 cfun->curr_properties |=
1295 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1296 PROP_ssa);
1297
1298 /* Create BB for body of the function and connect it properly. */
1299 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1300 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1301 make_edge (bb, EXIT_BLOCK_PTR, 0);
1302
1303 return bb;
1304}
1305
1306/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1307 offset indicated by VIRTUAL_OFFSET, if that is
1308 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1309 zero for a result adjusting thunk. */
1310
1311static tree
1312thunk_adjust (gimple_stmt_iterator * bsi,
1313 tree ptr, bool this_adjusting,
1314 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1315{
1316 gimple stmt;
1317 tree ret;
1318
55d6cb23 1319 if (this_adjusting
1320 && fixed_offset != 0)
28454517 1321 {
1322 stmt = gimple_build_assign (ptr,
1323 fold_build2_loc (input_location,
1324 POINTER_PLUS_EXPR,
1325 TREE_TYPE (ptr), ptr,
1326 size_int (fixed_offset)));
1327 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1328 }
1329
1330 /* If there's a virtual offset, look up that value in the vtable and
1331 adjust the pointer again. */
1332 if (virtual_offset)
1333 {
1334 tree vtabletmp;
1335 tree vtabletmp2;
1336 tree vtabletmp3;
1337 tree offsettmp;
1338
1339 if (!vtable_entry_type)
1340 {
1341 tree vfunc_type = make_node (FUNCTION_TYPE);
1342 TREE_TYPE (vfunc_type) = integer_type_node;
1343 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1344 layout_type (vfunc_type);
1345
1346 vtable_entry_type = build_pointer_type (vfunc_type);
1347 }
1348
1349 vtabletmp =
1350 create_tmp_var (build_pointer_type
1351 (build_pointer_type (vtable_entry_type)), "vptr");
1352
1353 /* The vptr is always at offset zero in the object. */
1354 stmt = gimple_build_assign (vtabletmp,
1355 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1356 ptr));
1357 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1358 mark_symbols_for_renaming (stmt);
1359 find_referenced_vars_in (stmt);
1360
1361 /* Form the vtable address. */
1362 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1363 "vtableaddr");
1364 stmt = gimple_build_assign (vtabletmp2,
1365 build1 (INDIRECT_REF,
1366 TREE_TYPE (vtabletmp2), vtabletmp));
1367 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1368 mark_symbols_for_renaming (stmt);
1369 find_referenced_vars_in (stmt);
1370
1371 /* Find the entry with the vcall offset. */
1372 stmt = gimple_build_assign (vtabletmp2,
1373 fold_build2_loc (input_location,
1374 POINTER_PLUS_EXPR,
1375 TREE_TYPE (vtabletmp2),
1376 vtabletmp2,
1377 fold_convert (sizetype,
1378 virtual_offset)));
1379 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1380
1381 /* Get the offset itself. */
1382 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1383 "vcalloffset");
1384 stmt = gimple_build_assign (vtabletmp3,
1385 build1 (INDIRECT_REF,
1386 TREE_TYPE (vtabletmp3),
1387 vtabletmp2));
1388 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1389 mark_symbols_for_renaming (stmt);
1390 find_referenced_vars_in (stmt);
1391
1392 /* Cast to sizetype. */
1393 offsettmp = create_tmp_var (sizetype, "offset");
1394 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1395 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1396 mark_symbols_for_renaming (stmt);
1397 find_referenced_vars_in (stmt);
1398
1399 /* Adjust the `this' pointer. */
1400 ptr = fold_build2_loc (input_location,
1401 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1402 offsettmp);
1403 }
1404
55d6cb23 1405 if (!this_adjusting
1406 && fixed_offset != 0)
28454517 1407 /* Adjust the pointer by the constant. */
1408 {
1409 tree ptrtmp;
1410
1411 if (TREE_CODE (ptr) == VAR_DECL)
1412 ptrtmp = ptr;
1413 else
1414 {
1415 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1416 stmt = gimple_build_assign (ptrtmp, ptr);
1417 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1418 mark_symbols_for_renaming (stmt);
1419 find_referenced_vars_in (stmt);
1420 }
1421 ptr = fold_build2_loc (input_location,
1422 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1423 size_int (fixed_offset));
1424 }
1425
1426 /* Emit the statement and gimplify the adjustment expression. */
1427 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1428 stmt = gimple_build_assign (ret, ptr);
1429 mark_symbols_for_renaming (stmt);
1430 find_referenced_vars_in (stmt);
1431 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1432
1433 return ret;
1434}
1435
1436/* Produce assembler for thunk NODE. */
1437
1438static void
1439assemble_thunk (struct cgraph_node *node)
1440{
1441 bool this_adjusting = node->thunk.this_adjusting;
1442 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1443 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1444 tree virtual_offset = NULL;
1445 tree alias = node->thunk.alias;
1446 tree thunk_fndecl = node->decl;
1447 tree a = DECL_ARGUMENTS (thunk_fndecl);
1448
1449 current_function_decl = thunk_fndecl;
1450
1451 if (this_adjusting
1452 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1453 virtual_value, alias))
1454 {
1455 const char *fnname;
1456 tree fn_block;
1457
1458 DECL_RESULT (thunk_fndecl)
1459 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1460 RESULT_DECL, 0, integer_type_node);
22ea3b47 1461 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1462
1463 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1464 create one. */
1465 fn_block = make_node (BLOCK);
1466 BLOCK_VARS (fn_block) = a;
1467 DECL_INITIAL (thunk_fndecl) = fn_block;
1468 init_function_start (thunk_fndecl);
1469 cfun->is_thunk = 1;
1470 assemble_start_function (thunk_fndecl, fnname);
1471
1472 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1473 fixed_offset, virtual_value, alias);
1474
1475 assemble_end_function (thunk_fndecl, fnname);
1476 init_insn_lengths ();
1477 free_after_compilation (cfun);
1478 set_cfun (NULL);
1479 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1480 }
1481 else
1482 {
1483 tree restype;
1484 basic_block bb, then_bb, else_bb, return_bb;
1485 gimple_stmt_iterator bsi;
1486 int nargs = 0;
1487 tree arg;
1488 int i;
1489 tree resdecl;
1490 tree restmp = NULL;
1491 VEC(tree, heap) *vargs;
1492
1493 gimple call;
1494 gimple ret;
1495
1496 DECL_IGNORED_P (thunk_fndecl) = 1;
1497 bitmap_obstack_initialize (NULL);
1498
1499 if (node->thunk.virtual_offset_p)
1500 virtual_offset = size_int (virtual_value);
1501
1502 /* Build the return declaration for the function. */
1503 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1504 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1505 {
1506 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1507 DECL_ARTIFICIAL (resdecl) = 1;
1508 DECL_IGNORED_P (resdecl) = 1;
1509 DECL_RESULT (thunk_fndecl) = resdecl;
1510 }
1511 else
1512 resdecl = DECL_RESULT (thunk_fndecl);
1513
1514 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1515
1516 bsi = gsi_start_bb (bb);
1517
1518 /* Build call to the function being thunked. */
1519 if (!VOID_TYPE_P (restype))
1520 {
1521 if (!is_gimple_reg_type (restype))
1522 {
1523 restmp = resdecl;
1524 cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
1525 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1526 }
1527 else
1528 restmp = create_tmp_var_raw (restype, "retval");
1529 }
1530
1531 for (arg = a; arg; arg = TREE_CHAIN (arg))
1532 nargs++;
1533 vargs = VEC_alloc (tree, heap, nargs);
1534 if (this_adjusting)
1535 VEC_quick_push (tree, vargs,
1536 thunk_adjust (&bsi,
1537 a, 1, fixed_offset,
1538 virtual_offset));
1539 else
1540 VEC_quick_push (tree, vargs, a);
1541 for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
1542 VEC_quick_push (tree, vargs, arg);
1543 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1544 VEC_free (tree, heap, vargs);
1545 gimple_call_set_cannot_inline (call, true);
1546 gimple_call_set_from_thunk (call, true);
1547 if (restmp)
1548 gimple_call_set_lhs (call, restmp);
1549 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1550 mark_symbols_for_renaming (call);
1551 find_referenced_vars_in (call);
1552 update_stmt (call);
1553
1554 if (restmp && !this_adjusting)
1555 {
57ab8ec3 1556 tree true_label = NULL_TREE;
28454517 1557
1558 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1559 {
1560 gimple stmt;
1561 /* If the return type is a pointer, we need to
1562 protect against NULL. We know there will be an
1563 adjustment, because that's why we're emitting a
1564 thunk. */
1565 then_bb = create_basic_block (NULL, (void *) 0, bb);
1566 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1567 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1568 remove_edge (single_succ_edge (bb));
1569 true_label = gimple_block_label (then_bb);
28454517 1570 stmt = gimple_build_cond (NE_EXPR, restmp,
1571 fold_convert (TREE_TYPE (restmp),
1572 integer_zero_node),
1573 NULL_TREE, NULL_TREE);
1574 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1575 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1576 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1577 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1578 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1579 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1580 bsi = gsi_last_bb (then_bb);
1581 }
1582
1583 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1584 fixed_offset, virtual_offset);
1585 if (true_label)
1586 {
1587 gimple stmt;
1588 bsi = gsi_last_bb (else_bb);
1589 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1590 integer_zero_node));
1591 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1592 bsi = gsi_last_bb (return_bb);
1593 }
1594 }
1595 else
1596 gimple_call_set_tail (call, true);
1597
1598 /* Build return value. */
1599 ret = gimple_build_return (restmp);
1600 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1601
1602 delete_unreachable_blocks ();
1603 update_ssa (TODO_update_ssa);
1604
1605 cgraph_remove_same_body_alias (node);
1606 /* Since we want to emit the thunk, we explicitly mark its name as
1607 referenced. */
28454517 1608 cgraph_add_new_function (thunk_fndecl, true);
1609 bitmap_obstack_release (NULL);
1610 }
1611 current_function_decl = NULL;
1612}
1613
ae01b312 1614/* Expand function specified by NODE. */
e6d2b2d8 1615
ae01b312 1616static void
d9d9733a 1617cgraph_expand_function (struct cgraph_node *node)
ae01b312 1618{
1619 tree decl = node->decl;
1620
b0cdf642 1621 /* We ought not to compile any inline clones. */
cc636d56 1622 gcc_assert (!node->global.inlined_to);
b0cdf642 1623
6329636b 1624 announce_function (decl);
09fc9532 1625 node->process = 0;
961e3b13 1626
e7c352d1 1627 gcc_assert (node->lowered);
f8deefc1 1628
794da2bb 1629 /* Generate RTL for the body of DECL. */
84e10000 1630 tree_rest_of_compilation (decl);
961e3b13 1631
4ee9c684 1632 /* Make sure that BE didn't give up on compiling. */
c04e3894 1633 gcc_assert (TREE_ASM_WRITTEN (decl));
ae01b312 1634 current_function_decl = NULL;
ed772161 1635 if (node->same_body)
1636 {
28454517 1637 struct cgraph_node *alias, *next;
ed772161 1638 bool saved_alias = node->alias;
28454517 1639 for (alias = node->same_body;
1640 alias && alias->next; alias = alias->next)
1641 ;
1642 /* Walk aliases in the order they were created; it is possible that
 1643 thunks refer to the aliases made earlier. */
1644 for (; alias; alias = next)
1645 {
1646 next = alias->previous;
1647 if (!alias->thunk.thunk_p)
1648 assemble_alias (alias->decl,
1649 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1650 else
1651 assemble_thunk (alias);
1652 }
ed772161 1653 node->alias = saved_alias;
1654 }
1a1a827a 1655 gcc_assert (!cgraph_preserve_function_body_p (decl));
1656 cgraph_release_function_body (node);
1657 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1658 points to the dead function body. */
1659 cgraph_node_remove_callees (node);
e1be32b8 1660
1661 cgraph_function_flags_ready = true;
ae01b312 1662}
1663
b0cdf642 1664/* Return true when CALLEE_DECL should be inlined into CALLER_DECL. */
d7c6d889 1665
1666bool
326a9581 1667cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1668{
b0cdf642 1669 *reason = e->inline_failed;
1670 return !e->inline_failed;
d7c6d889 1671}
b0cdf642 1672
acc70efa 1673
acc70efa 1674
d9d9733a 1675/* Expand all functions that must be output.
1676
d7c6d889 1677 Attempt to topologically sort the nodes so that a function is output when
 1678 all called functions are already assembled, allowing data to be
91c82c20 1679 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1680 between a function and its callees (later we may choose to use a more
d7c6d889 1681 sophisticated algorithm for function reordering; we will likely want
1682 to use subsections to make the output functions appear in top-down
1683 order). */
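/* For example, if foo () calls bar () and both are marked for output, the
   intent of the ordering below is that bar () is assembled before foo (),
   so information recorded while assembling bar () is already available
   when foo () is expanded.  */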
1684
1685static void
a6868229 1686cgraph_expand_all_functions (void)
d7c6d889 1687{
1688 struct cgraph_node *node;
4c36ffe6 1689 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1690 int order_pos, new_order_pos = 0;
d7c6d889 1691 int i;
1692
d7c6d889 1693 order_pos = cgraph_postorder (order);
cc636d56 1694 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1695
7bd28bba 1696 /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1697 optimization, so we must be sure not to reference them. */
1698 for (i = 0; i < order_pos; i++)
09fc9532 1699 if (order[i]->process)
b0cdf642 1700 order[new_order_pos++] = order[i];
1701
1702 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1703 {
1704 node = order[i];
09fc9532 1705 if (node->process)
d7c6d889 1706 {
cc636d56 1707 gcc_assert (node->reachable);
09fc9532 1708 node->process = 0;
d7c6d889 1709 cgraph_expand_function (node);
1710 }
1711 }
523c1122 1712 cgraph_process_new_functions ();
773c5ba7 1713
d7c6d889 1714 free (order);
773c5ba7 1715
d7c6d889 1716}
1717
56af936e 1718/* This is used to sort the node types by the cgraph order number. */
1719
0b09525f 1720enum cgraph_order_sort_kind
1721{
1722 ORDER_UNDEFINED = 0,
1723 ORDER_FUNCTION,
1724 ORDER_VAR,
1725 ORDER_ASM
1726};
1727
56af936e 1728struct cgraph_order_sort
1729{
0b09525f 1730 enum cgraph_order_sort_kind kind;
56af936e 1731 union
1732 {
1733 struct cgraph_node *f;
1d416bd7 1734 struct varpool_node *v;
56af936e 1735 struct cgraph_asm_node *a;
1736 } u;
1737};
1738
1739/* Output all functions, variables, and asm statements in the order
1740 according to their order fields, which is the order in which they
1741 appeared in the file. This implements -fno-toplevel-reorder. In
1742 this mode we may output functions and variables which don't really
1743 need to be output. */
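/* As an illustration, a hypothetical translation unit such as

     int counter;
     asm ("# marker");
     void f (void) { counter++; }

   is emitted with the variable, the asm statement and the function in
   exactly this source order, since their order fields follow the order of
   appearance.  */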
1744
1745static void
1746cgraph_output_in_order (void)
1747{
1748 int max;
56af936e 1749 struct cgraph_order_sort *nodes;
1750 int i;
1751 struct cgraph_node *pf;
1d416bd7 1752 struct varpool_node *pv;
56af936e 1753 struct cgraph_asm_node *pa;
1754
1755 max = cgraph_order;
3e1cde87 1756 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1757
1d416bd7 1758 varpool_analyze_pending_decls ();
56af936e 1759
1760 for (pf = cgraph_nodes; pf; pf = pf->next)
1761 {
09fc9532 1762 if (pf->process)
56af936e 1763 {
1764 i = pf->order;
1765 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1766 nodes[i].kind = ORDER_FUNCTION;
1767 nodes[i].u.f = pf;
1768 }
1769 }
1770
1d416bd7 1771 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1772 {
1773 i = pv->order;
1774 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1775 nodes[i].kind = ORDER_VAR;
1776 nodes[i].u.v = pv;
1777 }
1778
1779 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1780 {
1781 i = pa->order;
1782 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1783 nodes[i].kind = ORDER_ASM;
1784 nodes[i].u.a = pa;
1785 }
56af936e 1786
304e5318 1787 /* In -fno-toplevel-reorder mode we output all statics here; mark them as needed. */
1788 for (i = 0; i < max; ++i)
1789 {
1790 if (nodes[i].kind == ORDER_VAR)
1791 {
1792 varpool_mark_needed_node (nodes[i].u.v);
1793 }
1794 }
1795 varpool_empty_needed_queue ();
1796
56af936e 1797 for (i = 0; i < max; ++i)
1798 {
1799 switch (nodes[i].kind)
1800 {
1801 case ORDER_FUNCTION:
09fc9532 1802 nodes[i].u.f->process = 0;
56af936e 1803 cgraph_expand_function (nodes[i].u.f);
1804 break;
1805
1806 case ORDER_VAR:
1d416bd7 1807 varpool_assemble_decl (nodes[i].u.v);
56af936e 1808 break;
1809
1810 case ORDER_ASM:
1811 assemble_asm (nodes[i].u.a->asm_str);
1812 break;
1813
1814 case ORDER_UNDEFINED:
1815 break;
1816
1817 default:
1818 gcc_unreachable ();
1819 }
1820 }
4b4ea2db 1821
1822 cgraph_asm_nodes = NULL;
3e1cde87 1823 free (nodes);
56af936e 1824}
1825
b0cdf642 1826/* Return true when function body of DECL still needs to be kept around
1827 for later re-use. */
1828bool
1829cgraph_preserve_function_body_p (tree decl)
1830{
1831 struct cgraph_node *node;
8d8c4c8d 1832
1833 gcc_assert (cgraph_global_info_ready);
b0cdf642 1834 /* Look if there is any clone around. */
ccf4ab6b 1835 node = cgraph_node (decl);
1836 if (node->clones)
1837 return true;
b0cdf642 1838 return false;
1839}
1840
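/* Run the inter-procedural passes: the small IPA passes, summary
   generation for the regular IPA and LTO streaming passes, streaming of
   summaries when LTO output is requested, and finally (unless this is an
   LTRANS stage) the regular IPA passes themselves.  */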
77fce4cd 1841static void
1842ipa_passes (void)
1843{
87d4aa85 1844 set_cfun (NULL);
4b14adf9 1845 current_function_decl = NULL;
75a70cf9 1846 gimple_register_cfg_hooks ();
77fce4cd 1847 bitmap_obstack_initialize (NULL);
59dd4830 1848
c9036234 1849 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1850
59dd4830 1851 if (!in_lto_p)
1852 execute_ipa_pass_list (all_small_ipa_passes);
9ed5b1f5 1853
7bfefa9d 1854 /* If pass_all_early_optimizations was not scheduled, the state of
1855 the cgraph will not be properly updated. Update it now. */
1856 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1857 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 1858
7bfefa9d 1859 if (!in_lto_p)
1860 {
1861 /* Generate coverage variables and constructors. */
1862 coverage_finish ();
1863
1864 /* Process new functions added. */
1865 set_cfun (NULL);
1866 current_function_decl = NULL;
1867 cgraph_process_new_functions ();
7bfefa9d 1868
c9036234 1869 execute_ipa_summary_passes
1870 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 1871 }
23433d72 1872
1873 /* Some targets need to handle LTO assembler output specially. */
1874 if (flag_generate_lto)
1875 targetm.asm_out.lto_start ();
1876
7bfefa9d 1877 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1878
1879 if (!in_lto_p)
1880 ipa_write_summaries ();
1881
23433d72 1882 if (flag_generate_lto)
1883 targetm.asm_out.lto_end ();
1884
8867b500 1885 if (!flag_ltrans)
1886 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 1887 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 1888
77fce4cd 1889 bitmap_obstack_release (NULL);
1890}
1891
34e5cced 1892
ae01b312 1893/* Perform simple optimizations based on callgraph. */
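/* The driver below works in stages: analyze pending variables, run the IPA
   passes, remove now-unreachable nodes, materialize clones, and finally
   expand the remaining functions to RTL and assemble the output.  */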
1894
7bfefa9d 1895void
d9d9733a 1896cgraph_optimize (void)
ae01b312 1897{
852f689e 1898 if (seen_error ())
cb2b5570 1899 return;
1900
b0cdf642 1901#ifdef ENABLE_CHECKING
1902 verify_cgraph ();
1903#endif
a861fe52 1904
c1dcd13c 1905 /* Frontend may output common variables after the unit has been finalized.
1906 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 1907 varpool_analyze_pending_decls ();
e9f08e82 1908
f79b6507 1909 timevar_push (TV_CGRAPHOPT);
51949610 1910 if (pre_ipa_mem_report)
1911 {
1912 fprintf (stderr, "Memory consumption before IPA\n");
1913 dump_memory_report (false);
1914 }
d7c6d889 1915 if (!quiet_flag)
cd6bca02 1916 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1917 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1918
be4d0974 1919 /* Don't run the IPA passes if there was any error or sorry messages. */
852f689e 1920 if (!seen_error ())
be4d0974 1921 ipa_passes ();
1922
34e5cced 1923 /* Do nothing else if any IPA pass found errors. */
852f689e 1924 if (seen_error ())
021c1c18 1925 {
1926 timevar_pop (TV_CGRAPHOPT);
1927 return;
1928 }
34e5cced 1929
e1be32b8 1930 /* This pass removes bodies of extern inline functions we never inlined.
1931 Do this later so other IPA passes see what is really going on. */
1932 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1933 cgraph_global_info_ready = true;
f79b6507 1934 if (cgraph_dump_file)
1935 {
e4200070 1936 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1937 dump_cgraph (cgraph_dump_file);
c1dcd13c 1938 dump_varpool (cgraph_dump_file);
f79b6507 1939 }
51949610 1940 if (post_ipa_mem_report)
1941 {
defa2fa6 1942 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1943 dump_memory_report (false);
1944 }
f79b6507 1945 timevar_pop (TV_CGRAPHOPT);
ae01b312 1946
d7c6d889 1947 /* Output everything. */
47306a5d 1948 (*debug_hooks->assembly_start) ();
e4200070 1949 if (!quiet_flag)
1950 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1951#ifdef ENABLE_CHECKING
1952 verify_cgraph ();
1953#endif
56af936e 1954
ccf4ab6b 1955 cgraph_materialize_all_clones ();
acc70efa 1956 cgraph_mark_functions_to_output ();
c1dcd13c 1957
523c1122 1958 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1959 if (!flag_toplevel_reorder)
1960 cgraph_output_in_order ();
1961 else
1962 {
1963 cgraph_output_pending_asms ();
1964
1965 cgraph_expand_all_functions ();
1d416bd7 1966 varpool_remove_unreferenced_decls ();
56af936e 1967
1d416bd7 1968 varpool_assemble_pending_decls ();
56af936e 1969 }
523c1122 1970 cgraph_process_new_functions ();
1971 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1972
f79b6507 1973 if (cgraph_dump_file)
1974 {
e4200070 1975 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1976 dump_cgraph (cgraph_dump_file);
1977 }
b0cdf642 1978#ifdef ENABLE_CHECKING
1979 verify_cgraph ();
4ee9c684 1980 /* Double check that all inline clones are gone and that all
1981 function bodies have been released from memory. */
852f689e 1982 if (!seen_error ())
4ee9c684 1983 {
1984 struct cgraph_node *node;
1985 bool error_found = false;
1986
1987 for (node = cgraph_nodes; node; node = node->next)
1988 if (node->analyzed
1989 && (node->global.inlined_to
1a1a827a 1990 || gimple_has_body_p (node->decl)))
4ee9c684 1991 {
1992 error_found = true;
1993 dump_cgraph_node (stderr, node);
a0c938f0 1994 }
4ee9c684 1995 if (error_found)
c04e3894 1996 internal_error ("nodes with unreleased memory found");
4ee9c684 1997 }
b0cdf642 1998#endif
ae01b312 1999}
34e5cced 2000
2001
2c56f72e 2002/* Generate and emit a static constructor or destructor. WHICH must
2003 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
2004 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
f0b5f617 2005 initialization priority for this constructor or destructor. */
b5530559 2006
2007void
c5344b58 2008cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 2009{
2010 static int counter = 0;
2011 char which_buf[16];
540edea7 2012 tree decl, name, resdecl;
b5530559 2013
2c56f72e 2014 /* The priority is encoded in the constructor or destructor name.
2015 collect2 will sort the names and arrange that they are called at
2016 program startup. */
2017 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
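  /* For example, the first constructor emitted with priority 65 gets the
     buffer "I_00065_0"; get_file_function_name then turns this into a
     file-local function name.  */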
db85cc4f 2018 name = get_file_function_name (which_buf);
b5530559 2019
e60a6f7b 2020 decl = build_decl (input_location, FUNCTION_DECL, name,
b5530559 2021 build_function_type (void_type_node, void_list_node));
2022 current_function_decl = decl;
2023
e60a6f7b 2024 resdecl = build_decl (input_location,
2025 RESULT_DECL, NULL_TREE, void_type_node);
540edea7 2026 DECL_ARTIFICIAL (resdecl) = 1;
540edea7 2027 DECL_RESULT (decl) = resdecl;
8e5b4ed6 2028 DECL_CONTEXT (resdecl) = decl;
540edea7 2029
80f2ef47 2030 allocate_struct_function (decl, false);
b5530559 2031
2032 TREE_STATIC (decl) = 1;
2033 TREE_USED (decl) = 1;
2034 DECL_ARTIFICIAL (decl) = 1;
b5530559 2035 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
2036 DECL_SAVED_TREE (decl) = body;
b1be8e04 2037 if (!targetm.have_ctors_dtors)
2038 {
2039 TREE_PUBLIC (decl) = 1;
2040 DECL_PRESERVE_P (decl) = 1;
2041 }
b5530559 2042 DECL_UNINLINABLE (decl) = 1;
2043
2044 DECL_INITIAL (decl) = make_node (BLOCK);
2045 TREE_USED (DECL_INITIAL (decl)) = 1;
2046
2047 DECL_SOURCE_LOCATION (decl) = input_location;
2048 cfun->function_end_locus = input_location;
2049
cc636d56 2050 switch (which)
2051 {
2052 case 'I':
2053 DECL_STATIC_CONSTRUCTOR (decl) = 1;
64c2e9b0 2054 decl_init_priority_insert (decl, priority);
cc636d56 2055 break;
2056 case 'D':
2057 DECL_STATIC_DESTRUCTOR (decl) = 1;
64c2e9b0 2058 decl_fini_priority_insert (decl, priority);
cc636d56 2059 break;
2060 default:
2061 gcc_unreachable ();
2062 }
b5530559 2063
2064 gimplify_function_tree (decl);
2065
523c1122 2066 cgraph_add_new_function (decl, false);
2067 cgraph_mark_needed_node (cgraph_node (decl));
e3a37aef 2068 set_cfun (NULL);
b5530559 2069}
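/* A minimal, purely illustrative sketch of how the function above might be
   used: wrap a GENERIC statement list BODY into a constructor run at
   program startup with the default priority.  The helper below is
   hypothetical and is not referenced anywhere in this file.  */
static void ATTRIBUTE_UNUSED
example_build_startup_ctor (tree body)
{
  /* 'I' selects a constructor; DEFAULT_INIT_PRIORITY is the priority used
     when no explicit priority was requested.  */
  cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
}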
121f3051 2070
2071void
2072init_cgraph (void)
2073{
2074 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2075}
b5d36404 2076
a0c938f0 2077/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2078 fixed by cgraph_function_versioning (); now the call statements in their
 2079 respective function bodies should be updated to call NEW_VERSION. */
2080
2081static void
2082update_call_expr (struct cgraph_node *new_version)
2083{
2084 struct cgraph_edge *e;
2085
2086 gcc_assert (new_version);
75a70cf9 2087
2088 /* Update the call expr on the edges to call the new version. */
b5d36404 2089 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2090 {
2091 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2092 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2093 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2094 }
b5d36404 2095}
2096
2097
2098/* Create a new cgraph node which is the new version of
 2099 OLD_VERSION node. REDIRECT_CALLERS holds the caller
 2100 edges which should be redirected to point to
 2101 NEW_VERSION. All the callee edges of OLD_VERSION
 2102 are cloned to the new version node. Return the new
 2103 version node. */
2104
2105static struct cgraph_node *
2106cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2107 tree new_decl,
2108 VEC(cgraph_edge_p,heap) *redirect_callers)
a70a5e2c 2109{
b5d36404 2110 struct cgraph_node *new_version;
32936803 2111 struct cgraph_edge *e;
b5d36404 2112 unsigned i;
2113
2114 gcc_assert (old_version);
a0c938f0 2115
b5d36404 2116 new_version = cgraph_node (new_decl);
2117
2118 new_version->analyzed = true;
2119 new_version->local = old_version->local;
a70a5e2c 2120 new_version->local.externally_visible = false;
2121 new_version->local.local = true;
2122 new_version->local.vtable_method = false;
b5d36404 2123 new_version->global = old_version->global;
 2124 new_version->rtl = old_version->rtl;
2125 new_version->reachable = true;
2126 new_version->count = old_version->count;
2127
a70a5e2c 2128 for (e = old_version->callees; e; e=e->next_callee)
2129 cgraph_clone_edge (e, new_version, e->call_stmt,
2130 e->lto_stmt_uid, REG_BR_PROB_BASE,
2131 CGRAPH_FREQ_BASE,
2132 e->loop_nest, true);
2133 for (e = old_version->indirect_calls; e; e=e->next_callee)
2134 cgraph_clone_edge (e, new_version, e->call_stmt,
2135 e->lto_stmt_uid, REG_BR_PROB_BASE,
2136 CGRAPH_FREQ_BASE,
2137 e->loop_nest, true);
4460a647 2138 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2139 {
2140 /* Redirect calls to the old version node to point to its new
2141 version. */
2142 cgraph_redirect_edge_callee (e, new_version);
2143 }
b5d36404 2144
2145 return new_version;
2146 }
2147
2148 /* Perform function versioning.
a0c938f0 2149 Function versioning includes copying of the tree and
b5d36404 2150 a callgraph update (creating a new cgraph node and updating
2151 its callees and callers).
2152
 2153 REDIRECT_CALLERS is a vector of the edges to be redirected
2154 to the new version.
2155
2156 TREE_MAP is a mapping of tree nodes we want to replace with
2157 new ones (according to results of prior analysis).
2158 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2159 It returns the new version's cgraph node.
5afe38fe 2160 ARGS_TO_SKIP lists the arguments to be dropped from the new version's
 2161 parameter list. */
b5d36404 2162
2163struct cgraph_node *
2164cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2165 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2166 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2167 bitmap args_to_skip,
2168 const char *clone_name)
b5d36404 2169{
2170 tree old_decl = old_version_node->decl;
2171 struct cgraph_node *new_version_node = NULL;
2172 tree new_decl;
2173
2174 if (!tree_versionable_function_p (old_decl))
2175 return NULL;
2176
2177 /* Make a new FUNCTION_DECL tree node for the
2178 new version. */
5afe38fe 2179 if (!args_to_skip)
2180 new_decl = copy_node (old_decl);
2181 else
2182 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2183
df0b8dfb 2184 cgraph_make_decl_local (new_decl);
2185 /* Generate a new name for the new version. */
2186 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2187 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2188 SET_DECL_RTL (new_decl, NULL);
2189
b5d36404 2190 /* Create the new version's call-graph node.
2191 and update the edges of the new node. */
2192 new_version_node =
2193 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2194 redirect_callers);
2195
2196 /* Copy the OLD_VERSION_NODE function tree to the new version. */
5afe38fe 2197 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
b5d36404 2198
a0c938f0 2199 /* Update the new version's properties.
e03a95e7 2200 Make the new version visible only within this translation unit. Make sure
 2201 it is not weak either.
a0c938f0 2202 ??? We cannot use COMDAT linkage because there is no
b5d36404 2203 ABI support for this. */
6137cc9f 2204 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2205 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2206 new_version_node->local.externally_visible = 0;
2207 new_version_node->local.local = 1;
2208 new_version_node->lowered = true;
f014e39d 2209
e03a95e7 2210 /* Update the call_expr on the edges to call the new version node. */
2211 update_call_expr (new_version_node);
48e1416a 2212
50828ed8 2213 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2214 return new_version_node;
2215}
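/* As a purely illustrative example, an IPA pass holding a node NODE and a
   vector CALLERS of edges it wants redirected could create an unmodified
   specialized copy with

     struct cgraph_node *clone
       = cgraph_function_versioning (node, callers, NULL, NULL, "example");

   passing NULL for TREE_MAP and ARGS_TO_SKIP so the body is copied
   unchanged; the clone suffix "example" is hypothetical.  */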
469679ab 2216
2217/* Produce separate function body for inline clones so the offline copy can be
2218 modified without affecting them. */
2219struct cgraph_node *
2220save_inline_function_body (struct cgraph_node *node)
2221{
ccf4ab6b 2222 struct cgraph_node *first_clone, *n;
469679ab 2223
2224 gcc_assert (node == cgraph_node (node->decl));
2225
2226 cgraph_lower_function (node);
2227
ccf4ab6b 2228 first_clone = node->clones;
469679ab 2229
2230 first_clone->decl = copy_node (node->decl);
469679ab 2231 cgraph_insert_node_to_hashtable (first_clone);
2232 gcc_assert (first_clone == cgraph_node (first_clone->decl));
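  /* FIRST_CLONE becomes the new master copy: re-parent every remaining
     sibling clone of NODE so that it hangs off FIRST_CLONE, and splice
     those siblings into FIRST_CLONE's own clone list.  */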
ccf4ab6b 2233 if (first_clone->next_sibling_clone)
2234 {
2235 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2236 n->clone_of = first_clone;
2237 n->clone_of = first_clone;
2238 n->next_sibling_clone = first_clone->clones;
2239 if (first_clone->clones)
2240 first_clone->clones->prev_sibling_clone = n;
2241 first_clone->clones = first_clone->next_sibling_clone;
2242 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2243 first_clone->next_sibling_clone = NULL;
2244 gcc_assert (!first_clone->prev_sibling_clone);
2245 }
2246 first_clone->clone_of = NULL;
2247 node->clones = NULL;
2248
2249 if (first_clone->clones)
2250 for (n = first_clone->clones; n != first_clone;)
2251 {
2252 gcc_assert (n->decl == node->decl);
2253 n->decl = first_clone->decl;
2254 if (n->clones)
2255 n = n->clones;
2256 else if (n->next_sibling_clone)
2257 n = n->next_sibling_clone;
2258 else
2259 {
2260 while (n != first_clone && !n->next_sibling_clone)
2261 n = n->clone_of;
2262 if (n != first_clone)
2263 n = n->next_sibling_clone;
2264 }
2265 }
469679ab 2266
2267 /* Copy the OLD_VERSION_NODE function tree to the new version. */
5afe38fe 2268 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
469679ab 2269
2270 DECL_EXTERNAL (first_clone->decl) = 0;
ecd88073 2271 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
469679ab 2272 TREE_PUBLIC (first_clone->decl) = 0;
2273 DECL_COMDAT (first_clone->decl) = 0;
7fe9b425 2274 VEC_free (ipa_opt_pass, heap,
6d1cc52c 2275 first_clone->ipa_transforms_to_apply);
2276 first_clone->ipa_transforms_to_apply = NULL;
469679ab 2277
469679ab 2278#ifdef ENABLE_CHECKING
2279 verify_cgraph_node (first_clone);
2280#endif
2281 return first_clone;
2282}
a861fe52 2283
ccf4ab6b 2284/* Given a virtual clone, turn it into an actual clone. */
2285static void
2286cgraph_materialize_clone (struct cgraph_node *node)
2287{
2288 bitmap_obstack_initialize (NULL);
e748b31d 2289#ifdef ENABLE_CHECKING
2290 node->former_clone_of = node->clone_of->decl;
2291 if (node->clone_of->former_clone_of)
2292 node->former_clone_of = node->clone_of->former_clone_of;
2293#endif
ccf4ab6b 2294 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2295 tree_function_versioning (node->clone_of->decl, node->decl,
2296 node->clone.tree_map, true,
2297 node->clone.args_to_skip);
e20422ea 2298 if (cgraph_dump_file)
2299 {
2300 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2301 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2302 }
ccf4ab6b 2303
2304 /* Function is no longer clone. */
2305 if (node->next_sibling_clone)
2306 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2307 if (node->prev_sibling_clone)
2308 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2309 else
2310 node->clone_of->clones = node->next_sibling_clone;
2311 node->next_sibling_clone = NULL;
2312 node->prev_sibling_clone = NULL;
6d1cc52c 2313 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2314 {
2315 cgraph_release_function_body (node->clone_of);
2316 cgraph_node_remove_callees (node->clone_of);
2317 ipa_remove_all_references (&node->clone_of->ref_list);
2318 }
ccf4ab6b 2319 node->clone_of = NULL;
2320 bitmap_obstack_release (NULL);
2321}
2322
c596d830 2323/* If necessary, change the function declaration in the call statement
2324 associated with E so that it corresponds to the edge callee. */
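/* For instance, when the callee has been replaced by a clone that drops
   some parameters (clone.combined_args_to_skip), the original call is
   rewritten with gimple_call_copy_skip_args so the skipped arguments are
   no longer passed.  */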
2325
2326gimple
2327cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2328{
2329 tree decl = gimple_call_fndecl (e->call_stmt);
2330 gimple new_stmt;
2331 gimple_stmt_iterator gsi;
2332
2333 if (!decl || decl == e->callee->decl
2334 /* Don't update call from same body alias to the real function. */
2335 || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
2336 return e->call_stmt;
2337
e748b31d 2338 gcc_assert (!cgraph_node (decl)->clone.combined_args_to_skip);
2339
c596d830 2340 if (cgraph_dump_file)
2341 {
2342 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2343 cgraph_node_name (e->caller), e->caller->uid,
2344 cgraph_node_name (e->callee), e->callee->uid);
2345 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2346 if (e->callee->clone.combined_args_to_skip)
2347 {
2348 fprintf (cgraph_dump_file, " combined args to skip: ");
2349 dump_bitmap (cgraph_dump_file, e->callee->clone.combined_args_to_skip);
2350 }
c596d830 2351 }
2352
2353 if (e->callee->clone.combined_args_to_skip)
2354 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
2355 e->callee->clone.combined_args_to_skip);
2356 else
2357 new_stmt = e->call_stmt;
2358 if (gimple_vdef (new_stmt)
2359 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2360 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2361 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2362
2363 gsi = gsi_for_stmt (e->call_stmt);
2364 gsi_replace (&gsi, new_stmt, true);
e32916b6 2365 update_stmt (new_stmt);
c596d830 2366
2367 /* Update EH information too, just in case. */
2368 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
2369
2370 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2371
2372 if (cgraph_dump_file)
2373 {
2374 fprintf (cgraph_dump_file, " updated to:");
2375 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2376 }
2377 return new_stmt;
2378}
2379
ccf4ab6b 2380/* Once all functions from the compilation unit are in memory, produce all
c596d830 2381 clones and update all calls. We might also do this on demand if we don't
 2382 want to bring all functions to memory prior to compilation, but the current
 2383 WHOPR implementation does that and it is a bit easier to keep everything
 2384 right in this order. */
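/* The loop below repeats until it reaches a fixed point: a clone can only
   be materialized once its clone_of already has a gimple body, so clones
   of clones may require an additional iteration.  */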
ccf4ab6b 2385void
2386cgraph_materialize_all_clones (void)
2387{
2388 struct cgraph_node *node;
2389 bool stabilized = false;
2390
2391 if (cgraph_dump_file)
2392 fprintf (cgraph_dump_file, "Materializing clones\n");
2393#ifdef ENABLE_CHECKING
2394 verify_cgraph ();
2395#endif
2396
 2397 /* We could also use topological order, but the number of iterations should
 2398 be bounded by the number of IPA passes, since a single IPA pass is unlikely
 2399 to create clones of the clones it created itself. */
2400 while (!stabilized)
2401 {
2402 stabilized = true;
2403 for (node = cgraph_nodes; node; node = node->next)
2404 {
2405 if (node->clone_of && node->decl != node->clone_of->decl
2406 && !gimple_has_body_p (node->decl))
2407 {
2408 if (gimple_has_body_p (node->clone_of->decl))
2409 {
2410 if (cgraph_dump_file)
e20422ea 2411 {
2412 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2413 cgraph_node_name (node->clone_of),
2414 cgraph_node_name (node));
2415 if (node->clone.tree_map)
2416 {
2417 unsigned int i;
2418 fprintf (cgraph_dump_file, " replace map: ");
2419 for (i = 0; i < VEC_length (ipa_replace_map_p,
2420 node->clone.tree_map);
2421 i++)
2422 {
2423 struct ipa_replace_map *replace_info;
2424 replace_info = VEC_index (ipa_replace_map_p,
2425 node->clone.tree_map,
2426 i);
2427 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2428 fprintf (cgraph_dump_file, " -> ");
2429 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2430 fprintf (cgraph_dump_file, "%s%s;",
2431 replace_info->replace_p ? "(replace)":"",
2432 replace_info->ref_p ? "(ref)":"");
2433 }
2434 fprintf (cgraph_dump_file, "\n");
2435 }
2436 if (node->clone.args_to_skip)
2437 {
2438 fprintf (cgraph_dump_file, " args_to_skip: ");
2439 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2440 }
 2441 if (node->clone.combined_args_to_skip)
2442 {
2443 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2444 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2445 }
2446 }
ccf4ab6b 2447 cgraph_materialize_clone (node);
a510bd8d 2448 stabilized = false;
ccf4ab6b 2449 }
ccf4ab6b 2450 }
2451 }
2452 }
ee3f5fc0 2453 for (node = cgraph_nodes; node; node = node->next)
2454 if (!node->analyzed && node->callees)
2455 cgraph_node_remove_callees (node);
c596d830 2456 if (cgraph_dump_file)
2457 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2458#ifdef ENABLE_CHECKING
2459 verify_cgraph ();
2460#endif
ccf4ab6b 2461 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2462}
2463
a861fe52 2464#include "gt-cgraphunit.h"