gcc/cgraphunit.c
cd6bca02 1/* Callgraph based interprocedural optimizations.
028a99ef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front-end has parsed the whole body of the
33 function and it is certain that neither the body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is finalized
46 and will no longer change.
b0cdf642 47
6329636b 48 The call-graph construction and local function
b0cdf642 49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C frontend).
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On tree-ssa, genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
79
6329636b 80 Analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too,
87 and variables found to be needed are output to the assembly file. Via the
88 mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable, and thus
94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined and are not external are output.
98
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize the code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
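
/* Illustrative sketch only, not part of GCC itself: the shape of the driver
   loop a front-end is expected to implement against the entry points listed
   above.  The parse_next_function and parse_next_variable hooks are
   hypothetical stand-ins for the front-end's own parser, and the variable
   entry point is assumed to be varpool_finalize_decl from cgraph.h.

     void
     hypothetical_frontend_driver (void)
     {
       tree decl;

       while ((decl = parse_next_function ()) != NULL_TREE)
         cgraph_finalize_function (decl, false);

       while ((decl = parse_next_variable ()) != NULL_TREE)
         varpool_finalize_decl (decl);

       cgraph_finalize_compilation_unit ();
     }

   cgraph_finalize_compilation_unit (defined below) runs the analysis and,
   at the end, cgraph_optimize, so a typical front-end never needs to call
   cgraph_optimize directly.  */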
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
bfec3452 146static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 147
ecb08119 148FILE *cgraph_dump_file;
121f3051 149
2c56f72e 150/* A vector of FUNCTION_DECLs declared as static constructors. */
151static GTY (()) VEC(tree, gc) *static_ctors;
152/* A vector of FUNCTION_DECLs declared as static destructors. */
153static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 154
28454517 155/* Used for vtable lookup in thunk adjusting. */
156static GTY (()) tree vtable_entry_type;
157
a861fe52 158/* When the target does not have ctors and dtors, we call all constructors
310d2511 159 and destructors by a special initialization/destruction function
48e1416a 160 recognized by collect2.
161
a861fe52 162 When we are going to build this function, collect all constructors and
163 destructors and turn them into normal functions. */
164
165static void
166record_cdtor_fn (tree fndecl)
167{
2de29097 168 struct cgraph_node *node;
169 if (targetm.have_ctors_dtors
170 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
171 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 172 return;
173
174 if (DECL_STATIC_CONSTRUCTOR (fndecl))
175 {
2c56f72e 176 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 177 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 178 }
179 if (DECL_STATIC_DESTRUCTOR (fndecl))
180 {
2c56f72e 181 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 182 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 183 }
2de29097 184 node = cgraph_node (fndecl);
185 node->local.disregard_inline_limits = 1;
186 cgraph_mark_reachable_node (node);
a861fe52 187}
188
2c56f72e 189/* Define global constructor/destructor functions for the CDTORS, of
190 which there are LEN. The CDTORS are sorted by initialization
191 priority. If CTOR_P is true, these are constructors; otherwise,
192 they are destructors. */
193
a861fe52 194static void
2c56f72e 195build_cdtor (bool ctor_p, tree *cdtors, size_t len)
a861fe52 196{
2c56f72e 197 size_t i;
a861fe52 198
2c56f72e 199 i = 0;
200 while (i < len)
201 {
202 tree body;
203 tree fn;
204 priority_type priority;
205
206 priority = 0;
207 body = NULL_TREE;
208 /* Find the next batch of constructors/destructors with the same
209 initialization priority. */
210 do
211 {
212 priority_type p;
414c3a2c 213 tree call;
2c56f72e 214 fn = cdtors[i];
215 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
216 if (!body)
217 priority = p;
218 else if (p != priority)
219 break;
414c3a2c 220 call = build_call_expr (fn, 0);
221 append_to_statement_list (call, &body);
2c56f72e 222 ++i;
223 }
224 while (i < len);
225 gcc_assert (body != NULL_TREE);
226 /* Generate a function to call all the functions of like
227 priority. */
228 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
229 }
230}
231
232/* Comparison function for qsort. P1 and P2 are actually of type
233 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
234 used to determine the sort order. */
a861fe52 235
2c56f72e 236static int
237compare_ctor (const void *p1, const void *p2)
238{
239 tree f1;
240 tree f2;
241 int priority1;
242 int priority2;
243
244 f1 = *(const tree *)p1;
245 f2 = *(const tree *)p2;
246 priority1 = DECL_INIT_PRIORITY (f1);
247 priority2 = DECL_INIT_PRIORITY (f2);
48e1416a 248
2c56f72e 249 if (priority1 < priority2)
250 return -1;
251 else if (priority1 > priority2)
252 return 1;
253 else
254 /* Ensure a stable sort. */
255 return (const tree *)p1 - (const tree *)p2;
256}
257
258/* Comparison function for qsort. P1 and P2 are actually of type
259 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
260 used to determine the sort order. */
a861fe52 261
2c56f72e 262static int
263compare_dtor (const void *p1, const void *p2)
264{
265 tree f1;
266 tree f2;
267 int priority1;
268 int priority2;
269
270 f1 = *(const tree *)p1;
271 f2 = *(const tree *)p2;
272 priority1 = DECL_FINI_PRIORITY (f1);
273 priority2 = DECL_FINI_PRIORITY (f2);
48e1416a 274
2c56f72e 275 if (priority1 < priority2)
276 return -1;
277 else if (priority1 > priority2)
278 return 1;
279 else
280 /* Ensure a stable sort. */
281 return (const tree *)p1 - (const tree *)p2;
a861fe52 282}
283
284/* Generate functions to call static constructors and destructors
285 for targets that do not support .ctors/.dtors sections. These
286 functions have magic names which are detected by collect2. */
287
288static void
289cgraph_build_cdtor_fns (void)
290{
2c56f72e 291 if (!VEC_empty (tree, static_ctors))
a861fe52 292 {
2c56f72e 293 gcc_assert (!targetm.have_ctors_dtors);
294 qsort (VEC_address (tree, static_ctors),
48e1416a 295 VEC_length (tree, static_ctors),
2c56f72e 296 sizeof (tree),
297 compare_ctor);
298 build_cdtor (/*ctor_p=*/true,
299 VEC_address (tree, static_ctors),
48e1416a 300 VEC_length (tree, static_ctors));
2c56f72e 301 VEC_truncate (tree, static_ctors, 0);
a861fe52 302 }
2c56f72e 303
304 if (!VEC_empty (tree, static_dtors))
a861fe52 305 {
2c56f72e 306 gcc_assert (!targetm.have_ctors_dtors);
307 qsort (VEC_address (tree, static_dtors),
48e1416a 308 VEC_length (tree, static_dtors),
2c56f72e 309 sizeof (tree),
310 compare_dtor);
311 build_cdtor (/*ctor_p=*/false,
312 VEC_address (tree, static_dtors),
48e1416a 313 VEC_length (tree, static_dtors));
2c56f72e 314 VEC_truncate (tree, static_dtors, 0);
a861fe52 315 }
316}
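
/* A worked example of the cdtor machinery above, assuming a target without
   .ctors/.dtors support and the hypothetical input

     __attribute__((constructor (101))) static void a (void) { ... }
     __attribute__((constructor (101))) static void b (void) { ... }
     __attribute__((constructor (200))) static void c (void) { ... }

   record_cdtor_fn collects a, b and c and clears their constructor flags;
   cgraph_build_cdtor_fns then sorts them by DECL_INIT_PRIORITY and
   build_cdtor emits one wrapper per priority, conceptually

     static void wrapper_101 (void) { a (); b (); }
     static void wrapper_200 (void) { c (); }

   whose magic 'I'/'D' assembler names, produced by cgraph_build_static_cdtor,
   are what collect2 looks for.  The wrapper names shown here are only
   illustrative; the real names are target and priority dependent.  */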
317
2c0b522d 318/* Determine if function DECL is needed. That is, visible to something
319 either outside this translation unit or something magic in the system
6329636b 320 configury. */
2c0b522d 321
7bfefa9d 322bool
323cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 324{
3f82b628 325 /* If the user told us it is used, then it must be so. */
05806473 326 if (node->local.externally_visible)
327 return true;
328
3f82b628 329 /* ??? If the assembler name is set by hand, it is possible to assemble
330 the name later after finalizing the function and the fact is noticed
331 in assemble_name then. This is arguably a bug. */
332 if (DECL_ASSEMBLER_NAME_SET_P (decl)
333 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
334 return true;
335
55680bef 336 /* With -fkeep-inline-functions we are keeping all inline functions except
337 for extern inline ones. */
338 if (flag_keep_inline_functions
339 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 340 && !DECL_EXTERNAL (decl)
341 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 342 return true;
343
2c0b522d 344 /* If we decided it was needed before, but at the time we didn't have
345 the body of the function available, then it's still needed. We have
346 to go back and re-check its dependencies now. */
347 if (node->needed)
348 return true;
349
350 /* Externally visible functions must be output. The exception is
a0c938f0 351 COMDAT functions that must be output only when they are needed.
8baa9d15 352
353 When not optimizing, also output the static functions (see
95da6220 354 PR24561), but don't do so for always_inline functions, functions
0f9238c0 355 declared inline and nested functions. These were optimized out
d3d410e1 356 in the original implementation and it is unclear whether we want
554f2707 357 to change the behavior here. */
bba7ddf8 358 if (((TREE_PUBLIC (decl)
0f9238c0 359 || (!optimize
360 && !node->local.disregard_inline_limits
d3d410e1 361 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 362 && !(DECL_CONTEXT (decl)
363 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 364 && !flag_whole_program
365 && !flag_lto
366 && !flag_whopr)
62eec3b4 367 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 368 return true;
369
370 /* Constructors and destructors are reachable from the runtime by
371 some mechanism. */
372 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
373 return true;
374
2c0b522d 375 return false;
376}
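
/* A few illustrative examples of how the predicate above answers (assuming
   -fwhole-program, -flto and -fwhopr are not in effect):

     void api_entry (void) { ... }            TREE_PUBLIC, neither COMDAT nor
                                              DECL_EXTERNAL, so it is needed
     __attribute__((constructor))
     static void init (void) { ... }          static constructor, reachable
                                              from the runtime, so needed
     static void helper (void) { ... }        when optimizing, not needed by
                                              itself; it survives only if the
                                              callgraph walk finds a reachable
                                              caller (at -O0 such statics are
                                              kept, per the PR24561 note)

   These declarations are only examples, not code used anywhere in GCC.  */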
377
bdc40eb8 378/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
523c1122 379 functions into the callgraph so that they look like ordinary reachable
380 functions inserted into the callgraph already at construction time. */
381
382bool
383cgraph_process_new_functions (void)
384{
385 bool output = false;
386 tree fndecl;
387 struct cgraph_node *node;
388
0cddb138 389 varpool_analyze_pending_decls ();
523c1122 390 /* Note that this queue may grow as it is being processed, as the new
391 functions may generate new ones. */
392 while (cgraph_new_nodes)
393 {
394 node = cgraph_new_nodes;
395 fndecl = node->decl;
396 cgraph_new_nodes = cgraph_new_nodes->next_needed;
397 switch (cgraph_state)
398 {
399 case CGRAPH_STATE_CONSTRUCTION:
400 /* At construction time we just need to finalize the function and move
401 it into the reachable functions list. */
402
403 node->next_needed = NULL;
404 cgraph_finalize_function (fndecl, false);
405 cgraph_mark_reachable_node (node);
406 output = true;
407 break;
408
409 case CGRAPH_STATE_IPA:
f517b36e 410 case CGRAPH_STATE_IPA_SSA:
523c1122 411 /* When IPA optimization has already started, do all essential
412 transformations that have already been performed on the whole
413 cgraph but not on this function. */
414
75a70cf9 415 gimple_register_cfg_hooks ();
523c1122 416 if (!node->analyzed)
417 cgraph_analyze_function (node);
418 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
419 current_function_decl = fndecl;
9c1bff7a 420 compute_inline_parameters (node);
f517b36e 421 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
422 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
423 /* When not optimizing, be sure we run early local passes anyway
424 to expand OMP. */
425 || !optimize)
20099e35 426 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 427 free_dominance_info (CDI_POST_DOMINATORS);
428 free_dominance_info (CDI_DOMINATORS);
429 pop_cfun ();
430 current_function_decl = NULL;
431 break;
432
433 case CGRAPH_STATE_EXPANSION:
434 /* Functions created during expansion shall be compiled
435 directly. */
09fc9532 436 node->process = 0;
523c1122 437 cgraph_expand_function (node);
438 break;
439
440 default:
441 gcc_unreachable ();
442 break;
443 }
50828ed8 444 cgraph_call_function_insertion_hooks (node);
0cddb138 445 varpool_analyze_pending_decls ();
523c1122 446 }
447 return output;
448}
449
9b8fb23a 450/* As a GCC extension we allow redefinition of the function. The
451 semantics when the two bodies differ are not well defined.
452 We replace the old body with the new body, so in unit-at-a-time mode
453 we always use the new body, while in normal mode we may end up with
454 the old body inlined into some functions and the new body expanded and
455 inlined in others.
456
457 ??? It may make more sense to use one body for inlining and the other
458 body for expanding the function, but this is difficult to do. */
459
460static void
461cgraph_reset_node (struct cgraph_node *node)
462{
09fc9532 463 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 464 This is *not* testing for whether we've already emitted the function.
465 That case can be sort-of legitimately seen with real function redefinition
466 errors. I would argue that the front end should never present us with
467 such a case, but don't enforce that for now. */
09fc9532 468 gcc_assert (!node->process);
9b8fb23a 469
470 /* Reset our data structures so we can analyze the function again. */
471 memset (&node->local, 0, sizeof (node->local));
472 memset (&node->global, 0, sizeof (node->global));
473 memset (&node->rtl, 0, sizeof (node->rtl));
474 node->analyzed = false;
475 node->local.redefined_extern_inline = true;
476 node->local.finalized = false;
477
9b8fb23a 478 cgraph_node_remove_callees (node);
479
480 /* We may need to re-queue the node for assembling in case
46beef9a 481 we already processed it and ignored it as not needed, or got
482 a re-declaration in IMA mode. */
483 if (node->reachable)
9b8fb23a 484 {
485 struct cgraph_node *n;
486
487 for (n = cgraph_nodes_queue; n; n = n->next_needed)
488 if (n == node)
489 break;
490 if (!n)
491 node->reachable = 0;
492 }
493}
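
/* The redefinition case handled above, in its simplest GNU C form
   (illustrative only):

     extern inline int f (void) { return 1; }    first, inline-only body
     int f (void) { return 2; }                   later, the real body

   When the second definition is finalized, cgraph_finalize_function calls
   cgraph_reset_node on the already finalized node: its analysis is reset,
   its call edges are removed, and redefined_extern_inline is set so the new
   body is the one that gets analyzed and emitted.  */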
c08871a9 494
1e8e9920 495static void
496cgraph_lower_function (struct cgraph_node *node)
497{
498 if (node->lowered)
499 return;
bfec3452 500
501 if (node->nested)
502 lower_nested_functions (node->decl);
503 gcc_assert (!node->nested);
504
1e8e9920 505 tree_lowering_passes (node->decl);
506 node->lowered = true;
507}
508
28df663b 509/* DECL has been parsed. Take it, queue it, compile it at the whim of the
510 logic in effect. If NESTED is true, then our caller cannot stand to have
511 the garbage collector run at the moment. We would need to either create
512 a new GC context, or just not compile right now. */
ae01b312 513
514void
28df663b 515cgraph_finalize_function (tree decl, bool nested)
ae01b312 516{
517 struct cgraph_node *node = cgraph_node (decl);
518
c08871a9 519 if (node->local.finalized)
9b8fb23a 520 cgraph_reset_node (node);
28df663b 521
167b550b 522 node->pid = cgraph_max_pid ++;
c08871a9 523 notice_global_symbol (decl);
79bb87b4 524 node->local.finalized = true;
e27482aa 525 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 526 node->finalized_by_frontend = true;
a861fe52 527 record_cdtor_fn (node->decl);
ae01b312 528
7bfefa9d 529 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 530 cgraph_mark_needed_node (node);
531
ecda6e51 532 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 533 level unit, we need to be conservative about possible entry points
534 there. */
62eec3b4 535 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 536 cgraph_mark_reachable_node (node);
537
2c0b522d 538 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 539 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 540 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 541
b69eb0ff 542 /* Possibly warn about unused parameters. */
543 if (warn_unused_parameter)
544 do_warn_unused_parameter (decl);
6329636b 545
546 if (!nested)
547 ggc_collect ();
ae01b312 548}
549
0da03d11 550/* The C99 extern inline keywords allow changing a declaration after the function
551 has been finalized. We need to re-decide whether we want to mark the function
552 as needed then. */
553
554void
555cgraph_mark_if_needed (tree decl)
556{
557 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 558 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 559 cgraph_mark_needed_node (node);
560}
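
/* An illustrative C99 example of the situation described above:

     inline int g (void) { return 0; }    an inline definition only; by itself
                                          it provides no external definition
     extern int g (void);                 a later declaration with extern; in
                                          C99 this translation unit must now
                                          provide the external definition

   After seeing the second declaration the front-end is expected to call
   cgraph_mark_if_needed so the already finalized node can still be marked
   as needed and emitted.  */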
561
f2d608d8 562#ifdef ENABLE_CHECKING
ccf4ab6b 563/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
564static bool
565clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
566{
567 while (node != node2 && node2)
568 node2 = node2->clone_of;
569 return node2 != NULL;
570}
f2d608d8 571#endif
ccf4ab6b 572
b0cdf642 573/* Verify the consistency of the given cgraph node. */
4b987fac 574DEBUG_FUNCTION void
b0cdf642 575verify_cgraph_node (struct cgraph_node *node)
576{
577 struct cgraph_edge *e;
e27482aa 578 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
117ef3d7 579 struct function *saved_cfun = cfun;
e27482aa 580 basic_block this_block;
75a70cf9 581 gimple_stmt_iterator gsi;
9bfec7c2 582 bool error_found = false;
b0cdf642 583
852f689e 584 if (seen_error ())
bd09cd3e 585 return;
586
b0cdf642 587 timevar_push (TV_CGRAPH_VERIFY);
117ef3d7 588 /* debug_generic_stmt needs a correct cfun. */
589 set_cfun (this_cfun);
b0cdf642 590 for (e = node->callees; e; e = e->next_callee)
591 if (e->aux)
592 {
0a81f5a0 593 error ("aux field set for edge %s->%s",
abd3e6b5 594 identifier_to_locale (cgraph_node_name (e->caller)),
595 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 596 error_found = true;
597 }
a2cb9b3b 598 if (node->count < 0)
599 {
600 error ("Execution count is negative");
601 error_found = true;
602 }
59dd4830 603 if (node->global.inlined_to && node->local.externally_visible)
604 {
605 error ("Externally visible inline clone");
606 error_found = true;
607 }
608 if (node->global.inlined_to && node->address_taken)
609 {
610 error ("Inline clone with address taken");
611 error_found = true;
612 }
613 if (node->global.inlined_to && node->needed)
614 {
615 error ("Inline clone is needed");
616 error_found = true;
617 }
799c8711 618 for (e = node->indirect_calls; e; e = e->next_callee)
619 {
620 if (e->aux)
621 {
622 error ("aux field set for indirect edge from %s",
623 identifier_to_locale (cgraph_node_name (e->caller)));
624 error_found = true;
625 }
626 if (!e->indirect_unknown_callee
627 || !e->indirect_info)
628 {
629 error ("An indirect edge from %s is not marked as indirect or has "
630 "associated indirect_info, the corresponding statement is: ",
631 identifier_to_locale (cgraph_node_name (e->caller)));
632 debug_gimple_stmt (e->call_stmt);
633 error_found = true;
634 }
635 }
b0cdf642 636 for (e = node->callers; e; e = e->next_caller)
637 {
a2cb9b3b 638 if (e->count < 0)
639 {
640 error ("caller edge count is negative");
641 error_found = true;
642 }
4ae20857 643 if (e->frequency < 0)
644 {
645 error ("caller edge frequency is negative");
646 error_found = true;
647 }
648 if (e->frequency > CGRAPH_FREQ_MAX)
649 {
650 error ("caller edge frequency is too large");
651 error_found = true;
652 }
1c094d2f 653 if (gimple_has_body_p (e->caller->decl)
654 && !e->caller->global.inlined_to
655 && (e->frequency
656 != compute_call_stmt_bb_frequency (e->caller->decl,
657 gimple_bb (e->call_stmt))))
658 {
659 error ("caller edge frequency %i does not match BB freqency %i",
660 e->frequency,
661 compute_call_stmt_bb_frequency (e->caller->decl,
662 gimple_bb (e->call_stmt)));
663 error_found = true;
664 }
b0cdf642 665 if (!e->inline_failed)
666 {
667 if (node->global.inlined_to
668 != (e->caller->global.inlined_to
669 ? e->caller->global.inlined_to : e->caller))
670 {
0a81f5a0 671 error ("inlined_to pointer is wrong");
b0cdf642 672 error_found = true;
673 }
674 if (node->callers->next_caller)
675 {
0a81f5a0 676 error ("multiple inline callers");
b0cdf642 677 error_found = true;
678 }
679 }
680 else
681 if (node->global.inlined_to)
682 {
0a81f5a0 683 error ("inlined_to pointer set for noninline callers");
b0cdf642 684 error_found = true;
685 }
686 }
687 if (!node->callers && node->global.inlined_to)
688 {
5cd75817 689 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 690 error_found = true;
691 }
692 if (node->global.inlined_to == node)
693 {
0a81f5a0 694 error ("inlined_to pointer refers to itself");
b0cdf642 695 error_found = true;
696 }
697
0f6439b9 698 if (!cgraph_node (node->decl))
b0cdf642 699 {
0f6439b9 700 error ("node not found in cgraph_hash");
b0cdf642 701 error_found = true;
702 }
a0c938f0 703
ccf4ab6b 704 if (node->clone_of)
705 {
706 struct cgraph_node *n;
707 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
708 if (n == node)
709 break;
710 if (!n)
711 {
712 error ("node has wrong clone_of");
713 error_found = true;
714 }
715 }
716 if (node->clones)
717 {
718 struct cgraph_node *n;
719 for (n = node->clones; n; n = n->next_sibling_clone)
720 if (n->clone_of != node)
721 break;
722 if (n)
723 {
724 error ("node has wrong clone list");
725 error_found = true;
726 }
727 }
728 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
729 {
730 error ("node is in clone list but it is not clone");
731 error_found = true;
732 }
733 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
734 {
735 error ("node has wrong prev_clone pointer");
736 error_found = true;
737 }
738 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
739 {
740 error ("double linked list of clones corrupted");
741 error_found = true;
742 }
c524ac5d 743 if (node->same_comdat_group)
744 {
745 struct cgraph_node *n = node->same_comdat_group;
746
747 if (!DECL_ONE_ONLY (node->decl))
748 {
749 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
750 error_found = true;
751 }
752 if (n == node)
753 {
754 error ("node is alone in a comdat group");
755 error_found = true;
756 }
757 do
758 {
759 if (!n->same_comdat_group)
760 {
761 error ("same_comdat_group is not a circular list");
762 error_found = true;
763 break;
764 }
765 n = n->same_comdat_group;
766 }
767 while (n != node);
768 }
ccf4ab6b 769
770 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 771 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 772 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
773 && !flag_wpa)
b0cdf642 774 {
e27482aa 775 if (this_cfun->cfg)
776 {
777 /* The nodes we're interested in are never shared, so walk
778 the tree ignoring duplicates. */
e7c352d1 779 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 780 /* Reach the trees by walking over the CFG, and note the
781 enclosing basic-blocks in the call edges. */
782 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 783 for (gsi = gsi_start_bb (this_block);
784 !gsi_end_p (gsi);
785 gsi_next (&gsi))
9bfec7c2 786 {
75a70cf9 787 gimple stmt = gsi_stmt (gsi);
799c8711 788 if (is_gimple_call (stmt))
9bfec7c2 789 {
790 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 791 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 792 if (e)
793 {
794 if (e->aux)
795 {
0a81f5a0 796 error ("shared call_stmt:");
75a70cf9 797 debug_gimple_stmt (stmt);
9bfec7c2 798 error_found = true;
799 }
799c8711 800 if (!e->indirect_unknown_callee)
28454517 801 {
799c8711 802 if (e->callee->same_body_alias)
803 {
804 error ("edge points to same body alias:");
805 debug_tree (e->callee->decl);
806 error_found = true;
807 }
e748b31d 808#ifdef ENABLE_CHECKING
809 else if (!e->callee->global.inlined_to
799c8711 810 && decl
e748b31d 811 && cgraph_get_node (decl)
812 && (e->callee->former_clone_of
813 != cgraph_get_node (decl)->decl)
799c8711 814 && !clone_of_p (cgraph_node (decl),
815 e->callee))
816 {
817 error ("edge points to wrong declaration:");
818 debug_tree (e->callee->decl);
819 fprintf (stderr," Instead of:");
820 debug_tree (decl);
821 error_found = true;
822 }
e748b31d 823#endif
28454517 824 }
799c8711 825 else if (decl)
9bfec7c2 826 {
799c8711 827 error ("an indirect edge with unknown callee "
828 "corresponding to a call_stmt with "
829 "a known declaration:");
ee3f5fc0 830 error_found = true;
799c8711 831 debug_gimple_stmt (e->call_stmt);
9bfec7c2 832 }
833 e->aux = (void *)1;
834 }
799c8711 835 else if (decl)
9bfec7c2 836 {
0a81f5a0 837 error ("missing callgraph edge for call stmt:");
75a70cf9 838 debug_gimple_stmt (stmt);
9bfec7c2 839 error_found = true;
840 }
841 }
842 }
e27482aa 843 pointer_set_destroy (visited_nodes);
e27482aa 844 }
845 else
846 /* No CFG available?! */
847 gcc_unreachable ();
848
b0cdf642 849 for (e = node->callees; e; e = e->next_callee)
850 {
799c8711 851 if (!e->aux)
b0cdf642 852 {
0a81f5a0 853 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 854 identifier_to_locale (cgraph_node_name (e->caller)),
855 identifier_to_locale (cgraph_node_name (e->callee)));
75a70cf9 856 debug_gimple_stmt (e->call_stmt);
b0cdf642 857 error_found = true;
858 }
859 e->aux = 0;
860 }
799c8711 861 for (e = node->indirect_calls; e; e = e->next_callee)
862 {
863 if (!e->aux)
864 {
865 error ("an indirect edge from %s has no corresponding call_stmt",
866 identifier_to_locale (cgraph_node_name (e->caller)));
867 debug_gimple_stmt (e->call_stmt);
868 error_found = true;
869 }
870 e->aux = 0;
871 }
b0cdf642 872 }
873 if (error_found)
874 {
875 dump_cgraph_node (stderr, node);
0a81f5a0 876 internal_error ("verify_cgraph_node failed");
b0cdf642 877 }
117ef3d7 878 set_cfun (saved_cfun);
b0cdf642 879 timevar_pop (TV_CGRAPH_VERIFY);
880}
881
882/* Verify whole cgraph structure. */
4b987fac 883DEBUG_FUNCTION void
b0cdf642 884verify_cgraph (void)
885{
886 struct cgraph_node *node;
887
852f689e 888 if (seen_error ())
8ec2a798 889 return;
890
b0cdf642 891 for (node = cgraph_nodes; node; node = node->next)
892 verify_cgraph_node (node);
893}
894
56af936e 895/* Output all asm statements we have stored up to be output. */
896
897static void
898cgraph_output_pending_asms (void)
899{
900 struct cgraph_asm_node *can;
901
852f689e 902 if (seen_error ())
56af936e 903 return;
904
905 for (can = cgraph_asm_nodes; can; can = can->next)
906 assemble_asm (can->asm_str);
907 cgraph_asm_nodes = NULL;
908}
909
0785e435 910/* Analyze the function scheduled to be output. */
bfec3452 911static void
0785e435 912cgraph_analyze_function (struct cgraph_node *node)
913{
bfec3452 914 tree save = current_function_decl;
0785e435 915 tree decl = node->decl;
916
ec1e35b2 917 current_function_decl = decl;
e27482aa 918 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 919
6816d0c4 920 assign_assembler_name_if_neeeded (node->decl);
921
bfec3452 922 /* Make sure to gimplify bodies only once. While analyzing a
923 function we lower it, which will require gimplified nested
924 functions, so we can end up here with an already gimplified
925 body. */
926 if (!gimple_body (decl))
927 gimplify_function_tree (decl);
928 dump_function (TDI_generic, decl);
929
e27482aa 930 cgraph_lower_function (node);
6e8d6e86 931 node->analyzed = true;
0785e435 932
e27482aa 933 pop_cfun ();
bfec3452 934 current_function_decl = save;
0785e435 935}
936
05806473 937/* Look for externally_visible and used attributes and mark cgraph nodes
938 accordingly.
939
940 We cannot mark the nodes at the point the attributes are processed (in
941 handle_*_attribute) because the copy of the declarations available at that
942 point may not be canonical. For example, in:
943
944 void f();
945 void f() __attribute__((used));
946
947 the declaration we see in handle_used_attribute will be the second
948 declaration -- but the front end will subsequently merge that declaration
949 with the original declaration and discard the second declaration.
950
951 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
952
953 void f() {}
954 void f() __attribute__((externally_visible));
955
956 is valid.
957
958 So, we walk the nodes at the end of the translation unit, applying the
959 attributes at that point. */
960
961static void
962process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 963 struct varpool_node *first_var)
05806473 964{
965 struct cgraph_node *node;
1d416bd7 966 struct varpool_node *vnode;
05806473 967
968 for (node = cgraph_nodes; node != first; node = node->next)
969 {
970 tree decl = node->decl;
83a23b05 971 if (DECL_PRESERVE_P (decl))
0b49f8f8 972 cgraph_mark_needed_node (node);
05806473 973 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
974 {
ba12ea31 975 if (! TREE_PUBLIC (node->decl))
712d2297 976 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
977 "%<externally_visible%>"
978 " attribute have effect only on public objects");
59dd4830 979 else if (node->local.finalized)
980 cgraph_mark_needed_node (node);
05806473 981 }
982 }
1d416bd7 983 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 984 {
985 tree decl = vnode->decl;
83a23b05 986 if (DECL_PRESERVE_P (decl))
05806473 987 {
22671757 988 vnode->force_output = true;
05806473 989 if (vnode->finalized)
1d416bd7 990 varpool_mark_needed_node (vnode);
05806473 991 }
992 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
993 {
ba12ea31 994 if (! TREE_PUBLIC (vnode->decl))
712d2297 995 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
996 "%<externally_visible%>"
997 " attribute have effect only on public objects");
59dd4830 998 else if (vnode->finalized)
999 varpool_mark_needed_node (vnode);
05806473 1000 }
1001 }
1002}
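
/* The variable side of the walk above, illustratively:

     static int lookup_table[16] __attribute__((used));

   The attribute handler sets DECL_PRESERVE_P on the VAR_DECL; the loop above
   then sets force_output on the varpool node and, once the variable is
   finalized, marks it needed so it is emitted even though nothing in the
   translation unit references it.  The declaration is only an example, not
   code used anywhere in GCC.  */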
1003
aeeb194b 1004/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1005 each reachable function) and build the cgraph.
1006 The function can be called multiple times after inserting new nodes
0d424440 1007 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 1008
aeeb194b 1009static void
1010cgraph_analyze_functions (void)
ae01b312 1011{
c1dcd13c 1012 /* Keep track of already processed nodes when called multiple times for
06b27565 1013 intermodule optimization. */
c1dcd13c 1014 static struct cgraph_node *first_analyzed;
c17d0de1 1015 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1016 static struct varpool_node *first_analyzed_var;
aeeb194b 1017 struct cgraph_node *node, *next;
ae01b312 1018
c17d0de1 1019 process_function_and_variable_attributes (first_processed,
1020 first_analyzed_var);
1021 first_processed = cgraph_nodes;
1d416bd7 1022 first_analyzed_var = varpool_nodes;
1023 varpool_analyze_pending_decls ();
f79b6507 1024 if (cgraph_dump_file)
ae01b312 1025 {
e4200070 1026 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1027 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1028 if (node->needed)
f79b6507 1029 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1030 fprintf (cgraph_dump_file, "\n");
ae01b312 1031 }
aeeb194b 1032 cgraph_process_new_functions ();
ae01b312 1033
e6d2b2d8 1034 /* Propagate the reachability flag and lower the representation of all reachable
1035 functions. In the future, lowering will introduce new functions and
1036 new entry points on the way (by template instantiation and virtual
1037 method table generation for instance). */
3d7bfc56 1038 while (cgraph_nodes_queue)
ae01b312 1039 {
0785e435 1040 struct cgraph_edge *edge;
3d7bfc56 1041 tree decl = cgraph_nodes_queue->decl;
1042
1043 node = cgraph_nodes_queue;
d87976fb 1044 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1045 node->next_needed = NULL;
ae01b312 1046
638531ad 1047 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 1048 weak alias attribute to kill its body. See
638531ad 1049 gcc.c-torture/compile/20011119-1.c */
75a70cf9 1050 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 1051 {
1052 cgraph_reset_node (node);
1053 continue;
1054 }
638531ad 1055
7bfefa9d 1056 if (!node->analyzed)
1057 cgraph_analyze_function (node);
2c0b522d 1058
ae01b312 1059 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1060 if (!edge->callee->reachable)
2c0b522d 1061 cgraph_mark_reachable_node (edge->callee);
1062
61c2c7b1 1063 if (node->same_comdat_group)
1064 {
1065 for (next = node->same_comdat_group;
1066 next != node;
1067 next = next->same_comdat_group)
1068 cgraph_mark_reachable_node (next);
1069 }
1070
d544ceff 1071 /* If decl is a clone of an abstract function, mark that abstract
1072 function so that we don't release its body. The DECL_INITIAL() of that
1073 abstract function declaration will be later needed to output debug info. */
1074 if (DECL_ABSTRACT_ORIGIN (decl))
1075 {
1076 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1077 origin_node->abstract_and_needed = true;
1078 }
1079
c17d0de1 1080 /* We finalize local static variables while constructing callgraph
1081 edges. Process their attributes too. */
1082 process_function_and_variable_attributes (first_processed,
1083 first_analyzed_var);
1084 first_processed = cgraph_nodes;
1d416bd7 1085 first_analyzed_var = varpool_nodes;
1086 varpool_analyze_pending_decls ();
aeeb194b 1087 cgraph_process_new_functions ();
ae01b312 1088 }
2c0b522d 1089
aa5e06c7 1090 /* Collect entry points to the unit. */
f79b6507 1091 if (cgraph_dump_file)
3d7bfc56 1092 {
e4200070 1093 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1094 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1095 if (node->needed)
f79b6507 1096 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1097 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1098 dump_cgraph (cgraph_dump_file);
3d7bfc56 1099 }
e6d2b2d8 1100
f79b6507 1101 if (cgraph_dump_file)
1102 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1103
f4ec5ce1 1104 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1105 {
1106 tree decl = node->decl;
f4ec5ce1 1107 next = node->next;
ae01b312 1108
1a1a827a 1109 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 1110 cgraph_reset_node (node);
9b8fb23a 1111
1a1a827a 1112 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 1113 {
f79b6507 1114 if (cgraph_dump_file)
1115 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1116 cgraph_remove_node (node);
9b8fb23a 1117 continue;
ae01b312 1118 }
bc5cab3b 1119 else
1120 node->next_needed = NULL;
1a1a827a 1121 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 1122 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1123 }
f79b6507 1124 if (cgraph_dump_file)
e4200070 1125 {
1126 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1127 dump_cgraph (cgraph_dump_file);
1128 }
c1dcd13c 1129 first_analyzed = cgraph_nodes;
ae01b312 1130 ggc_collect ();
aeeb194b 1131}
1132
8f69fd82 1133
aeeb194b 1134/* Analyze the whole compilation unit once it is parsed completely. */
1135
1136void
1137cgraph_finalize_compilation_unit (void)
1138{
9929334e 1139 timevar_push (TV_CGRAPH);
1140
bfec3452 1141 /* Do not skip analyzing the functions if there were errors; we
1142 would miss diagnostics for the following functions otherwise. */
aeeb194b 1143
8f69fd82 1144 /* Emit size functions we didn't inline. */
4189e677 1145 finalize_size_functions ();
8f69fd82 1146
8f69fd82 1147 /* Call functions declared with the "constructor" or "destructor"
1148 attribute. */
1149 cgraph_build_cdtor_fns ();
aeeb194b 1150
9929334e 1151 /* Mark alias targets necessary and emit diagnostics. */
1152 finish_aliases_1 ();
1153
aeeb194b 1154 if (!quiet_flag)
1155 {
1156 fprintf (stderr, "\nAnalyzing compilation unit\n");
1157 fflush (stderr);
1158 }
1159
9929334e 1160 /* Gimplify and lower all functions, compute reachability and
1161 remove unreachable nodes. */
1162 cgraph_analyze_functions ();
1163
8f69fd82 1164 /* Mark alias targets necessary and emit diagnostics. */
1165 finish_aliases_1 ();
1166
9929334e 1167 /* Gimplify and lower thunks. */
aeeb194b 1168 cgraph_analyze_functions ();
bfec3452 1169
9929334e 1170 /* Finally drive the pass manager. */
bfec3452 1171 cgraph_optimize ();
9929334e 1172
1173 timevar_pop (TV_CGRAPH);
ae01b312 1174}
9ed5b1f5 1175
1176
ae01b312 1177/* Figure out what functions we want to assemble. */
1178
1179static void
d9d9733a 1180cgraph_mark_functions_to_output (void)
ae01b312 1181{
1182 struct cgraph_node *node;
61c2c7b1 1183#ifdef ENABLE_CHECKING
1184 bool check_same_comdat_groups = false;
1185
1186 for (node = cgraph_nodes; node; node = node->next)
1187 gcc_assert (!node->process);
1188#endif
ae01b312 1189
ae01b312 1190 for (node = cgraph_nodes; node; node = node->next)
1191 {
1192 tree decl = node->decl;
d7c6d889 1193 struct cgraph_edge *e;
a0c938f0 1194
61c2c7b1 1195 gcc_assert (!node->process || node->same_comdat_group);
1196 if (node->process)
1197 continue;
d7c6d889 1198
1199 for (e = node->callers; e; e = e->next_caller)
611e5405 1200 if (e->inline_failed)
d7c6d889 1201 break;
ae01b312 1202
e6d2b2d8 1203 /* We need to output all local functions that are used and not
1204 always inlined, as well as those that are reachable from
1205 outside the current compilation unit. */
1a1a827a 1206 if (node->analyzed
b0cdf642 1207 && !node->global.inlined_to
08843223 1208 && (node->needed || node->reachable_from_other_partition
cdedc740 1209 || node->address_taken
d7c6d889 1210 || (e && node->reachable))
4ee9c684 1211 && !TREE_ASM_WRITTEN (decl)
ae01b312 1212 && !DECL_EXTERNAL (decl))
61c2c7b1 1213 {
1214 node->process = 1;
1215 if (node->same_comdat_group)
1216 {
1217 struct cgraph_node *next;
1218 for (next = node->same_comdat_group;
1219 next != node;
1220 next = next->same_comdat_group)
1221 next->process = 1;
1222 }
1223 }
1224 else if (node->same_comdat_group)
1225 {
1226#ifdef ENABLE_CHECKING
1227 check_same_comdat_groups = true;
1228#endif
1229 }
cc636d56 1230 else
9cee7c3f 1231 {
1232 /* We should've reclaimed all functions that are not needed. */
1233#ifdef ENABLE_CHECKING
75a70cf9 1234 if (!node->global.inlined_to
1a1a827a 1235 && gimple_has_body_p (decl)
08843223 1236 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
1237 inline copies are inside the partition, we can end up not removing the body since
1238 we no longer have an analyzed node pointing to it. */
1239 && !node->in_other_partition
9cee7c3f 1240 && !DECL_EXTERNAL (decl))
1241 {
1242 dump_cgraph_node (stderr, node);
1243 internal_error ("failed to reclaim unneeded function");
1244 }
1245#endif
75a70cf9 1246 gcc_assert (node->global.inlined_to
1a1a827a 1247 || !gimple_has_body_p (decl)
08843223 1248 || node->in_other_partition
9cee7c3f 1249 || DECL_EXTERNAL (decl));
1250
1251 }
a0c938f0 1252
961e3b13 1253 }
61c2c7b1 1254#ifdef ENABLE_CHECKING
1255 if (check_same_comdat_groups)
1256 for (node = cgraph_nodes; node; node = node->next)
1257 if (node->same_comdat_group && !node->process)
1258 {
1259 tree decl = node->decl;
1260 if (!node->global.inlined_to
1261 && gimple_has_body_p (decl)
08843223 1262 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
1263 inline copies are inside the partition, we can end up not removing the body since
1264 we no longer have an analyzed node pointing to it. */
1265 && !node->in_other_partition
61c2c7b1 1266 && !DECL_EXTERNAL (decl))
1267 {
1268 dump_cgraph_node (stderr, node);
1269 internal_error ("failed to reclaim unneeded function");
1270 }
1271 }
1272#endif
961e3b13 1273}
1274
28454517 1275/* DECL is a FUNCTION_DECL. Initialize data structures so that DECL is a function
1276 in lowered GIMPLE form.
1277
1278 Set current_function_decl and cfun to the newly constructed empty function body.
1279 Return the basic block in the function body. */
1280
1281static basic_block
1282init_lowered_empty_function (tree decl)
1283{
1284 basic_block bb;
1285
1286 current_function_decl = decl;
1287 allocate_struct_function (decl, false);
1288 gimple_register_cfg_hooks ();
1289 init_empty_tree_cfg ();
1290 init_tree_ssa (cfun);
1291 init_ssa_operands ();
1292 cfun->gimple_df->in_ssa_p = true;
1293 DECL_INITIAL (decl) = make_node (BLOCK);
1294
1295 DECL_SAVED_TREE (decl) = error_mark_node;
1296 cfun->curr_properties |=
1297 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1298 PROP_ssa);
1299
1300 /* Create BB for body of the function and connect it properly. */
1301 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1302 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1303 make_edge (bb, EXIT_BLOCK_PTR, 0);
1304
1305 return bb;
1306}
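
/* Schematically, the CFG produced by init_lowered_empty_function is

     ENTRY_BLOCK_PTR --> bb --> EXIT_BLOCK_PTR

   with bb returned to the caller as the (initially empty) body block;
   assemble_thunk below then fills bb with the adjusted call.  */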
1307
1308/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1309 offset indicated by VIRTUAL_OFFSET, if that is
1310 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1311 zero for a result adjusting thunk. */
1312
1313static tree
1314thunk_adjust (gimple_stmt_iterator * bsi,
1315 tree ptr, bool this_adjusting,
1316 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1317{
1318 gimple stmt;
1319 tree ret;
1320
55d6cb23 1321 if (this_adjusting
1322 && fixed_offset != 0)
28454517 1323 {
1324 stmt = gimple_build_assign (ptr,
1325 fold_build2_loc (input_location,
1326 POINTER_PLUS_EXPR,
1327 TREE_TYPE (ptr), ptr,
1328 size_int (fixed_offset)));
1329 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1330 }
1331
1332 /* If there's a virtual offset, look up that value in the vtable and
1333 adjust the pointer again. */
1334 if (virtual_offset)
1335 {
1336 tree vtabletmp;
1337 tree vtabletmp2;
1338 tree vtabletmp3;
1339 tree offsettmp;
1340
1341 if (!vtable_entry_type)
1342 {
1343 tree vfunc_type = make_node (FUNCTION_TYPE);
1344 TREE_TYPE (vfunc_type) = integer_type_node;
1345 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1346 layout_type (vfunc_type);
1347
1348 vtable_entry_type = build_pointer_type (vfunc_type);
1349 }
1350
1351 vtabletmp =
1352 create_tmp_var (build_pointer_type
1353 (build_pointer_type (vtable_entry_type)), "vptr");
1354
1355 /* The vptr is always at offset zero in the object. */
1356 stmt = gimple_build_assign (vtabletmp,
1357 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1358 ptr));
1359 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1360 mark_symbols_for_renaming (stmt);
1361 find_referenced_vars_in (stmt);
1362
1363 /* Form the vtable address. */
1364 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1365 "vtableaddr");
1366 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1367 build_simple_mem_ref (vtabletmp));
28454517 1368 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1369 mark_symbols_for_renaming (stmt);
1370 find_referenced_vars_in (stmt);
1371
1372 /* Find the entry with the vcall offset. */
1373 stmt = gimple_build_assign (vtabletmp2,
1374 fold_build2_loc (input_location,
1375 POINTER_PLUS_EXPR,
1376 TREE_TYPE (vtabletmp2),
1377 vtabletmp2,
1378 fold_convert (sizetype,
1379 virtual_offset)));
1380 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1381
1382 /* Get the offset itself. */
1383 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1384 "vcalloffset");
1385 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1386 build_simple_mem_ref (vtabletmp2));
28454517 1387 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1388 mark_symbols_for_renaming (stmt);
1389 find_referenced_vars_in (stmt);
1390
1391 /* Cast to sizetype. */
1392 offsettmp = create_tmp_var (sizetype, "offset");
1393 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1394 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1395 mark_symbols_for_renaming (stmt);
1396 find_referenced_vars_in (stmt);
1397
1398 /* Adjust the `this' pointer. */
1399 ptr = fold_build2_loc (input_location,
1400 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1401 offsettmp);
1402 }
1403
55d6cb23 1404 if (!this_adjusting
1405 && fixed_offset != 0)
28454517 1406 /* Adjust the pointer by the constant. */
1407 {
1408 tree ptrtmp;
1409
1410 if (TREE_CODE (ptr) == VAR_DECL)
1411 ptrtmp = ptr;
1412 else
1413 {
1414 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1415 stmt = gimple_build_assign (ptrtmp, ptr);
1416 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1417 mark_symbols_for_renaming (stmt);
1418 find_referenced_vars_in (stmt);
1419 }
1420 ptr = fold_build2_loc (input_location,
1421 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1422 size_int (fixed_offset));
1423 }
1424
1425 /* Emit the statement and gimplify the adjustment expression. */
1426 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1427 stmt = gimple_build_assign (ret, ptr);
1428 mark_symbols_for_renaming (stmt);
1429 find_referenced_vars_in (stmt);
1430 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1431
1432 return ret;
1433}
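
/* The pointer arithmetic thunk_adjust emits, written out as plain C for
   clarity (illustrative only; the real code builds the equivalent GIMPLE
   statement by statement):

     char *p = (char *) ptr;
     if (this_adjusting)
       p += fixed_offset;                          constant part first
     if (virtual_offset)
       {
         char *vtable = *(char **) p;              the vptr is at offset 0
         ptrdiff_t delta
           = *(ptrdiff_t *) (vtable + virtual_offset);
         p += delta;                               vcall offset from the vtable
       }
     if (!this_adjusting)
       p += fixed_offset;                          constant part last for
                                                   result-adjusting thunks

   The final value of p is what the function above returns in the
   "adjusted_this" temporary.  */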
1434
1435/* Produce assembler for thunk NODE. */
1436
1437static void
1438assemble_thunk (struct cgraph_node *node)
1439{
1440 bool this_adjusting = node->thunk.this_adjusting;
1441 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1442 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1443 tree virtual_offset = NULL;
1444 tree alias = node->thunk.alias;
1445 tree thunk_fndecl = node->decl;
1446 tree a = DECL_ARGUMENTS (thunk_fndecl);
1447
1448 current_function_decl = thunk_fndecl;
1449
1450 if (this_adjusting
1451 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1452 virtual_value, alias))
1453 {
1454 const char *fnname;
1455 tree fn_block;
1456
1457 DECL_RESULT (thunk_fndecl)
1458 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1459 RESULT_DECL, 0, integer_type_node);
22ea3b47 1460 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1461
1462 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1463 create one. */
1464 fn_block = make_node (BLOCK);
1465 BLOCK_VARS (fn_block) = a;
1466 DECL_INITIAL (thunk_fndecl) = fn_block;
1467 init_function_start (thunk_fndecl);
1468 cfun->is_thunk = 1;
1469 assemble_start_function (thunk_fndecl, fnname);
1470
1471 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1472 fixed_offset, virtual_value, alias);
1473
1474 assemble_end_function (thunk_fndecl, fnname);
1475 init_insn_lengths ();
1476 free_after_compilation (cfun);
1477 set_cfun (NULL);
1478 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1479 }
1480 else
1481 {
1482 tree restype;
1483 basic_block bb, then_bb, else_bb, return_bb;
1484 gimple_stmt_iterator bsi;
1485 int nargs = 0;
1486 tree arg;
1487 int i;
1488 tree resdecl;
1489 tree restmp = NULL;
1490 VEC(tree, heap) *vargs;
1491
1492 gimple call;
1493 gimple ret;
1494
1495 DECL_IGNORED_P (thunk_fndecl) = 1;
1496 bitmap_obstack_initialize (NULL);
1497
1498 if (node->thunk.virtual_offset_p)
1499 virtual_offset = size_int (virtual_value);
1500
1501 /* Build the return declaration for the function. */
1502 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1503 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1504 {
1505 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1506 DECL_ARTIFICIAL (resdecl) = 1;
1507 DECL_IGNORED_P (resdecl) = 1;
1508 DECL_RESULT (thunk_fndecl) = resdecl;
1509 }
1510 else
1511 resdecl = DECL_RESULT (thunk_fndecl);
1512
1513 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1514
1515 bsi = gsi_start_bb (bb);
1516
1517 /* Build call to the function being thunked. */
1518 if (!VOID_TYPE_P (restype))
1519 {
1520 if (!is_gimple_reg_type (restype))
1521 {
1522 restmp = resdecl;
2ab2ce89 1523 add_local_decl (cfun, restmp);
28454517 1524 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1525 }
1526 else
1527 restmp = create_tmp_var_raw (restype, "retval");
1528 }
1529
1530 for (arg = a; arg; arg = TREE_CHAIN (arg))
1531 nargs++;
1532 vargs = VEC_alloc (tree, heap, nargs);
1533 if (this_adjusting)
1534 VEC_quick_push (tree, vargs,
1535 thunk_adjust (&bsi,
1536 a, 1, fixed_offset,
1537 virtual_offset));
1538 else
1539 VEC_quick_push (tree, vargs, a);
1540 for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
1541 VEC_quick_push (tree, vargs, arg);
1542 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1543 VEC_free (tree, heap, vargs);
1544 gimple_call_set_cannot_inline (call, true);
1545 gimple_call_set_from_thunk (call, true);
1546 if (restmp)
1547 gimple_call_set_lhs (call, restmp);
1548 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1549 mark_symbols_for_renaming (call);
1550 find_referenced_vars_in (call);
1551 update_stmt (call);
1552
1553 if (restmp && !this_adjusting)
1554 {
57ab8ec3 1555 tree true_label = NULL_TREE;
28454517 1556
1557 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1558 {
1559 gimple stmt;
1560 /* If the return type is a pointer, we need to
1561 protect against NULL. We know there will be an
1562 adjustment, because that's why we're emitting a
1563 thunk. */
1564 then_bb = create_basic_block (NULL, (void *) 0, bb);
1565 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1566 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1567 remove_edge (single_succ_edge (bb));
1568 true_label = gimple_block_label (then_bb);
28454517 1569 stmt = gimple_build_cond (NE_EXPR, restmp,
1570 fold_convert (TREE_TYPE (restmp),
1571 integer_zero_node),
1572 NULL_TREE, NULL_TREE);
1573 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1574 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1575 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1576 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1577 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1578 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1579 bsi = gsi_last_bb (then_bb);
1580 }
1581
1582 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1583 fixed_offset, virtual_offset);
1584 if (true_label)
1585 {
1586 gimple stmt;
1587 bsi = gsi_last_bb (else_bb);
1588 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1589 integer_zero_node));
1590 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1591 bsi = gsi_last_bb (return_bb);
1592 }
1593 }
1594 else
1595 gimple_call_set_tail (call, true);
1596
1597 /* Build return value. */
1598 ret = gimple_build_return (restmp);
1599 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1600
1601 delete_unreachable_blocks ();
1602 update_ssa (TODO_update_ssa);
1603
1604 cgraph_remove_same_body_alias (node);
1605 /* Since we want to emit the thunk, we explicitly mark its name as
1606 referenced. */
28454517 1607 cgraph_add_new_function (thunk_fndecl, true);
1608 bitmap_obstack_release (NULL);
1609 }
1610 current_function_decl = NULL;
1611}
1612
ae01b312 1613/* Expand function specified by NODE. */
e6d2b2d8 1614
ae01b312 1615static void
d9d9733a 1616cgraph_expand_function (struct cgraph_node *node)
ae01b312 1617{
1618 tree decl = node->decl;
1619
b0cdf642 1620 /* We ought not to compile any inline clones. */
cc636d56 1621 gcc_assert (!node->global.inlined_to);
b0cdf642 1622
6329636b 1623 announce_function (decl);
09fc9532 1624 node->process = 0;
961e3b13 1625
e7c352d1 1626 gcc_assert (node->lowered);
f8deefc1 1627
794da2bb 1628 /* Generate RTL for the body of DECL. */
84e10000 1629 tree_rest_of_compilation (decl);
961e3b13 1630
4ee9c684 1631 /* Make sure that BE didn't give up on compiling. */
c04e3894 1632 gcc_assert (TREE_ASM_WRITTEN (decl));
ae01b312 1633 current_function_decl = NULL;
ed772161 1634 if (node->same_body)
1635 {
28454517 1636 struct cgraph_node *alias, *next;
ed772161 1637 bool saved_alias = node->alias;
28454517 1638 for (alias = node->same_body;
1639 alias && alias->next; alias = alias->next)
1640 ;
1641 /* Walk aliases in the order they were created; it is possible that
1642 thunks refer to the aliases made earlier. */
1643 for (; alias; alias = next)
1644 {
1645 next = alias->previous;
1646 if (!alias->thunk.thunk_p)
1647 assemble_alias (alias->decl,
1648 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1649 else
1650 assemble_thunk (alias);
1651 }
ed772161 1652 node->alias = saved_alias;
1653 }
1a1a827a 1654 gcc_assert (!cgraph_preserve_function_body_p (decl));
1655 cgraph_release_function_body (node);
1656 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1657 points to the dead function body. */
1658 cgraph_node_remove_callees (node);
e1be32b8 1659
1660 cgraph_function_flags_ready = true;
ae01b312 1661}
1662
b0cdf642 1663/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1664
1665bool
326a9581 1666cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1667{
b0cdf642 1668 *reason = e->inline_failed;
1669 return !e->inline_failed;
d7c6d889 1670}
b0cdf642 1671
acc70efa 1672
acc70efa 1673
d9d9733a 1674/* Expand all functions that must be output.
1675
d7c6d889 1676 Attempt to topologically sort the nodes so that a function is output when
1677 all the functions it calls are already assembled, to allow data to be
91c82c20 1678 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1679 between a function and its callees (later we may choose to use a more
d7c6d889 1680 sophisticated algorithm for function reordering; we will likely want
1681 to use subsections to make the output functions appear in top-down
1682 order). */
1683
1684static void
a6868229 1685cgraph_expand_all_functions (void)
d7c6d889 1686{
1687 struct cgraph_node *node;
4c36ffe6 1688 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1689 int order_pos, new_order_pos = 0;
d7c6d889 1690 int i;
1691
d7c6d889 1692 order_pos = cgraph_postorder (order);
cc636d56 1693 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1694
7bd28bba 1695  /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1696     optimization, so we must be sure not to reference them.  */
1697 for (i = 0; i < order_pos; i++)
09fc9532 1698 if (order[i]->process)
b0cdf642 1699 order[new_order_pos++] = order[i];
1700
1701 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1702 {
1703 node = order[i];
09fc9532 1704 if (node->process)
d7c6d889 1705 {
cc636d56 1706 gcc_assert (node->reachable);
09fc9532 1707 node->process = 0;
d7c6d889 1708 cgraph_expand_function (node);
1709 }
1710 }
523c1122 1711 cgraph_process_new_functions ();
773c5ba7 1712
d7c6d889 1713 free (order);
773c5ba7 1714
d7c6d889 1715}
1716
56af936e 1717/* This is used to sort the nodes by the cgraph order number.  */
1718
0b09525f 1719enum cgraph_order_sort_kind
1720{
1721 ORDER_UNDEFINED = 0,
1722 ORDER_FUNCTION,
1723 ORDER_VAR,
1724 ORDER_ASM
1725};
1726
56af936e 1727struct cgraph_order_sort
1728{
0b09525f 1729 enum cgraph_order_sort_kind kind;
56af936e 1730 union
1731 {
1732 struct cgraph_node *f;
1d416bd7 1733 struct varpool_node *v;
56af936e 1734 struct cgraph_asm_node *a;
1735 } u;
1736};
1737
1738/* Output all functions, variables, and asm statements in the order
1739 according to their order fields, which is the order in which they
1740 appeared in the file. This implements -fno-toplevel-reorder. In
1741 this mode we may output functions and variables which don't really
1742 need to be output. */
1743
1744static void
1745cgraph_output_in_order (void)
1746{
1747 int max;
56af936e 1748 struct cgraph_order_sort *nodes;
1749 int i;
1750 struct cgraph_node *pf;
1d416bd7 1751 struct varpool_node *pv;
56af936e 1752 struct cgraph_asm_node *pa;
1753
1754 max = cgraph_order;
3e1cde87 1755 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1756
1d416bd7 1757 varpool_analyze_pending_decls ();
56af936e 1758
1759 for (pf = cgraph_nodes; pf; pf = pf->next)
1760 {
09fc9532 1761 if (pf->process)
56af936e 1762 {
1763 i = pf->order;
1764 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1765 nodes[i].kind = ORDER_FUNCTION;
1766 nodes[i].u.f = pf;
1767 }
1768 }
1769
1d416bd7 1770 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1771 {
1772 i = pv->order;
1773 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1774 nodes[i].kind = ORDER_VAR;
1775 nodes[i].u.v = pv;
1776 }
1777
1778 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1779 {
1780 i = pa->order;
1781 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1782 nodes[i].kind = ORDER_ASM;
1783 nodes[i].u.a = pa;
1784 }
56af936e 1785
304e5318 1786  /* With -fno-toplevel-reorder we output all statics; mark them as needed.  */
1787 for (i = 0; i < max; ++i)
1788 {
1789 if (nodes[i].kind == ORDER_VAR)
1790 {
1791 varpool_mark_needed_node (nodes[i].u.v);
1792 }
1793 }
1794 varpool_empty_needed_queue ();
1795
56af936e 1796 for (i = 0; i < max; ++i)
1797 {
1798 switch (nodes[i].kind)
1799 {
1800 case ORDER_FUNCTION:
09fc9532 1801 nodes[i].u.f->process = 0;
56af936e 1802 cgraph_expand_function (nodes[i].u.f);
1803 break;
1804
1805 case ORDER_VAR:
1d416bd7 1806 varpool_assemble_decl (nodes[i].u.v);
56af936e 1807 break;
1808
1809 case ORDER_ASM:
1810 assemble_asm (nodes[i].u.a->asm_str);
1811 break;
1812
1813 case ORDER_UNDEFINED:
1814 break;
1815
1816 default:
1817 gcc_unreachable ();
1818 }
1819 }
4b4ea2db 1820
1821 cgraph_asm_nodes = NULL;
3e1cde87 1822 free (nodes);
56af936e 1823}
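/* Illustrative example, not part of the original source: with
   -fno-toplevel-reorder a unit such as

     int counter = 1;
     asm ("# marker");
     int get_counter (void) { return counter; }

   is emitted as variable, asm statement, function in exactly this order,
   because each toplevel entity is keyed by its ->order field above.  */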
1824
b0cdf642 1825/* Return true when function body of DECL still needs to be kept around
1826 for later re-use. */
1827bool
1828cgraph_preserve_function_body_p (tree decl)
1829{
1830 struct cgraph_node *node;
8d8c4c8d 1831
1832 gcc_assert (cgraph_global_info_ready);
b0cdf642 1833 /* Look if there is any clone around. */
ccf4ab6b 1834 node = cgraph_node (decl);
1835 if (node->clones)
1836 return true;
b0cdf642 1837 return false;
1838}
1839
77fce4cd 1840static void
1841ipa_passes (void)
1842{
87d4aa85 1843 set_cfun (NULL);
4b14adf9 1844 current_function_decl = NULL;
75a70cf9 1845 gimple_register_cfg_hooks ();
77fce4cd 1846 bitmap_obstack_initialize (NULL);
59dd4830 1847
c9036234 1848 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1849
59dd4830 1850 if (!in_lto_p)
1851 execute_ipa_pass_list (all_small_ipa_passes);
9ed5b1f5 1852
7bfefa9d 1853 /* If pass_all_early_optimizations was not scheduled, the state of
1854 the cgraph will not be properly updated. Update it now. */
1855 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1856 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 1857
7bfefa9d 1858 if (!in_lto_p)
1859 {
1860 /* Generate coverage variables and constructors. */
1861 coverage_finish ();
1862
1863 /* Process new functions added. */
1864 set_cfun (NULL);
1865 current_function_decl = NULL;
1866 cgraph_process_new_functions ();
7bfefa9d 1867
c9036234 1868 execute_ipa_summary_passes
1869 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 1870 }
23433d72 1871
1872 /* Some targets need to handle LTO assembler output specially. */
1873 if (flag_generate_lto)
1874 targetm.asm_out.lto_start ();
1875
7bfefa9d 1876 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1877
1878 if (!in_lto_p)
1879 ipa_write_summaries ();
1880
23433d72 1881 if (flag_generate_lto)
1882 targetm.asm_out.lto_end ();
1883
8867b500 1884 if (!flag_ltrans)
1885 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 1886 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 1887
77fce4cd 1888 bitmap_obstack_release (NULL);
1889}
1890
34e5cced 1891
ae01b312 1892/* Perform simple optimizations based on the callgraph.  */
1893
7bfefa9d 1894void
d9d9733a 1895cgraph_optimize (void)
ae01b312 1896{
852f689e 1897 if (seen_error ())
cb2b5570 1898 return;
1899
b0cdf642 1900#ifdef ENABLE_CHECKING
1901 verify_cgraph ();
1902#endif
a861fe52 1903
c1dcd13c 1904  /* The front end may output common variables after the unit has been finalized.
1905     It is safe to deal with them here as they are always zero-initialized.  */
1d416bd7 1906 varpool_analyze_pending_decls ();
e9f08e82 1907
f79b6507 1908 timevar_push (TV_CGRAPHOPT);
51949610 1909 if (pre_ipa_mem_report)
1910 {
1911 fprintf (stderr, "Memory consumption before IPA\n");
1912 dump_memory_report (false);
1913 }
d7c6d889 1914 if (!quiet_flag)
cd6bca02 1915 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1916 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1917
be4d0974 1918  /* Don't run the IPA passes if there were any errors or sorry messages.  */
852f689e 1919 if (!seen_error ())
be4d0974 1920 ipa_passes ();
1921
34e5cced 1922 /* Do nothing else if any IPA pass found errors. */
852f689e 1923 if (seen_error ())
021c1c18 1924 {
1925 timevar_pop (TV_CGRAPHOPT);
1926 return;
1927 }
34e5cced 1928
e1be32b8 1929  /* This pass removes bodies of extern inline functions we never inlined.
1930 Do this later so other IPA passes see what is really going on. */
1931 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1932 cgraph_global_info_ready = true;
f79b6507 1933 if (cgraph_dump_file)
1934 {
e4200070 1935 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1936 dump_cgraph (cgraph_dump_file);
c1dcd13c 1937 dump_varpool (cgraph_dump_file);
f79b6507 1938 }
51949610 1939 if (post_ipa_mem_report)
1940 {
defa2fa6 1941 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1942 dump_memory_report (false);
1943 }
f79b6507 1944 timevar_pop (TV_CGRAPHOPT);
ae01b312 1945
d7c6d889 1946 /* Output everything. */
47306a5d 1947 (*debug_hooks->assembly_start) ();
e4200070 1948 if (!quiet_flag)
1949 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1950#ifdef ENABLE_CHECKING
1951 verify_cgraph ();
1952#endif
56af936e 1953
ccf4ab6b 1954 cgraph_materialize_all_clones ();
acc70efa 1955 cgraph_mark_functions_to_output ();
c1dcd13c 1956
523c1122 1957 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1958 if (!flag_toplevel_reorder)
1959 cgraph_output_in_order ();
1960 else
1961 {
1962 cgraph_output_pending_asms ();
1963
1964 cgraph_expand_all_functions ();
1d416bd7 1965 varpool_remove_unreferenced_decls ();
56af936e 1966
1d416bd7 1967 varpool_assemble_pending_decls ();
56af936e 1968 }
523c1122 1969 cgraph_process_new_functions ();
1970 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1971
f79b6507 1972 if (cgraph_dump_file)
1973 {
e4200070 1974 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1975 dump_cgraph (cgraph_dump_file);
1976 }
b0cdf642 1977#ifdef ENABLE_CHECKING
1978 verify_cgraph ();
4ee9c684 1979 /* Double check that all inline clones are gone and that all
1980 function bodies have been released from memory. */
852f689e 1981 if (!seen_error ())
4ee9c684 1982 {
1983 struct cgraph_node *node;
1984 bool error_found = false;
1985
1986 for (node = cgraph_nodes; node; node = node->next)
1987 if (node->analyzed
1988 && (node->global.inlined_to
1a1a827a 1989 || gimple_has_body_p (node->decl)))
4ee9c684 1990 {
1991 error_found = true;
1992 dump_cgraph_node (stderr, node);
a0c938f0 1993 }
4ee9c684 1994 if (error_found)
c04e3894 1995 internal_error ("nodes with unreleased memory found");
4ee9c684 1996 }
b0cdf642 1997#endif
ae01b312 1998}
34e5cced 1999
2000
2c56f72e 2001/* Generate and emit a static constructor or destructor. WHICH must
2002 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
2003 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
f0b5f617 2004 initialization priority for this constructor or destructor. */
b5530559 2005
2006void
c5344b58 2007cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 2008{
2009 static int counter = 0;
2010 char which_buf[16];
540edea7 2011 tree decl, name, resdecl;
b5530559 2012
2c56f72e 2013 /* The priority is encoded in the constructor or destructor name.
2014 collect2 will sort the names and arrange that they are called at
2015 program startup. */
2016 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
db85cc4f 2017 name = get_file_function_name (which_buf);
b5530559 2018
e60a6f7b 2019 decl = build_decl (input_location, FUNCTION_DECL, name,
b5530559 2020 build_function_type (void_type_node, void_list_node));
2021 current_function_decl = decl;
2022
e60a6f7b 2023 resdecl = build_decl (input_location,
2024 RESULT_DECL, NULL_TREE, void_type_node);
540edea7 2025 DECL_ARTIFICIAL (resdecl) = 1;
540edea7 2026 DECL_RESULT (decl) = resdecl;
8e5b4ed6 2027 DECL_CONTEXT (resdecl) = decl;
540edea7 2028
80f2ef47 2029 allocate_struct_function (decl, false);
b5530559 2030
2031 TREE_STATIC (decl) = 1;
2032 TREE_USED (decl) = 1;
2033 DECL_ARTIFICIAL (decl) = 1;
b5530559 2034 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
2035 DECL_SAVED_TREE (decl) = body;
b1be8e04 2036 if (!targetm.have_ctors_dtors)
2037 {
2038 TREE_PUBLIC (decl) = 1;
2039 DECL_PRESERVE_P (decl) = 1;
2040 }
b5530559 2041 DECL_UNINLINABLE (decl) = 1;
2042
2043 DECL_INITIAL (decl) = make_node (BLOCK);
2044 TREE_USED (DECL_INITIAL (decl)) = 1;
2045
2046 DECL_SOURCE_LOCATION (decl) = input_location;
2047 cfun->function_end_locus = input_location;
2048
cc636d56 2049 switch (which)
2050 {
2051 case 'I':
2052 DECL_STATIC_CONSTRUCTOR (decl) = 1;
64c2e9b0 2053 decl_init_priority_insert (decl, priority);
cc636d56 2054 break;
2055 case 'D':
2056 DECL_STATIC_DESTRUCTOR (decl) = 1;
64c2e9b0 2057 decl_fini_priority_insert (decl, priority);
cc636d56 2058 break;
2059 default:
2060 gcc_unreachable ();
2061 }
b5530559 2062
2063 gimplify_function_tree (decl);
2064
523c1122 2065 cgraph_add_new_function (decl, false);
2066 cgraph_mark_needed_node (cgraph_node (decl));
e3a37aef 2067 set_cfun (NULL);
b5530559 2068}
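/* Illustrative sketch, not part of the original source: a front end or pass
   wanting FN called at program startup could build the GENERIC body and
   register it roughly as

     tree body = alloc_stmt_list ();
     append_to_statement_list (build_call_expr (fn, 0), &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   assuming FN is a FUNCTION_DECL taking no arguments; alloc_stmt_list,
   append_to_statement_list and build_call_expr are the existing GENERIC
   helpers, and DEFAULT_INIT_PRIORITY is the default priority macro.  */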
121f3051 2069
2070void
2071init_cgraph (void)
2072{
01ec0a6c 2073 if (!cgraph_dump_file)
2074 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 2075}
b5d36404 2076
a0c938f0 2077/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2078   fixed by cgraph_function_versioning (); now the call statements in
2079   the respective callers should be updated to call the NEW_VERSION.  */
2080
2081static void
2082update_call_expr (struct cgraph_node *new_version)
2083{
2084 struct cgraph_edge *e;
2085
2086 gcc_assert (new_version);
75a70cf9 2087
2088 /* Update the call expr on the edges to call the new version. */
b5d36404 2089 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2090 {
2091 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2092 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2093 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2094 }
b5d36404 2095}
2096
2097
2098/* Create a new cgraph node which is the new version of
2099   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
2100   edges which should be redirected to point to
2101   NEW_VERSION.  All the callee edges of OLD_VERSION
2102 are cloned to the new version node. Return the new
b06ab5fa 2103 version node.
2104
2105   If non-NULL, BBS_TO_COPY determines which basic blocks
2106   are copied, to prevent duplicating calls that are dead
2107   in the clone.  */
b5d36404 2108
2109static struct cgraph_node *
2110cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2111 tree new_decl,
b06ab5fa 2112 VEC(cgraph_edge_p,heap) *redirect_callers,
2113 bitmap bbs_to_copy)
2114 {
b5d36404 2115 struct cgraph_node *new_version;
32936803 2116 struct cgraph_edge *e;
b5d36404 2117 unsigned i;
2118
2119 gcc_assert (old_version);
a0c938f0 2120
b5d36404 2121 new_version = cgraph_node (new_decl);
2122
2123 new_version->analyzed = true;
2124 new_version->local = old_version->local;
a70a5e2c 2125 new_version->local.externally_visible = false;
2126 new_version->local.local = true;
2127 new_version->local.vtable_method = false;
b5d36404 2128 new_version->global = old_version->global;
a93f1c3b 2129 new_version->rtl = old_version->rtl;
b5d36404 2130 new_version->reachable = true;
2131 new_version->count = old_version->count;
2132
a70a5e2c 2133 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2134 if (!bbs_to_copy
2135 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2136 cgraph_clone_edge (e, new_version, e->call_stmt,
2137 e->lto_stmt_uid, REG_BR_PROB_BASE,
2138 CGRAPH_FREQ_BASE,
2139 e->loop_nest, true);
a70a5e2c 2140 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2141 if (!bbs_to_copy
2142 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2143 cgraph_clone_edge (e, new_version, e->call_stmt,
2144 e->lto_stmt_uid, REG_BR_PROB_BASE,
2145 CGRAPH_FREQ_BASE,
2146 e->loop_nest, true);
4460a647 2147 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2148 {
2149 /* Redirect calls to the old version node to point to its new
2150 version. */
2151 cgraph_redirect_edge_callee (e, new_version);
2152 }
b5d36404 2153
2154 return new_version;
2155 }
2156
2157 /* Perform function versioning.
a0c938f0 2158 Function versioning includes copying of the tree and
b5d36404 2159 a callgraph update (creating a new cgraph node and updating
2160 its callees and callers).
2161
2162    REDIRECT_CALLERS is a vector of the edges to be redirected
2163 to the new version.
2164
2165 TREE_MAP is a mapping of tree nodes we want to replace with
2166 new ones (according to results of prior analysis).
2167 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2168 It returns the new version's cgraph node.
b06ab5fa 2169    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2170    from the new version.
2171    If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2172    If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */
b5d36404 2173
2174struct cgraph_node *
2175cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2176 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2177 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2178 bitmap args_to_skip,
b06ab5fa 2179 bitmap bbs_to_copy,
2180 basic_block new_entry_block,
a70a5e2c 2181 const char *clone_name)
b5d36404 2182{
2183 tree old_decl = old_version_node->decl;
2184 struct cgraph_node *new_version_node = NULL;
2185 tree new_decl;
2186
2187 if (!tree_versionable_function_p (old_decl))
2188 return NULL;
2189
2190 /* Make a new FUNCTION_DECL tree node for the
2191 new version. */
5afe38fe 2192 if (!args_to_skip)
2193 new_decl = copy_node (old_decl);
2194 else
2195 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2196
df0b8dfb 2197 /* Generate a new name for the new version. */
2198 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2199 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2200 SET_DECL_RTL (new_decl, NULL);
2201
b5d36404 2202 /* Create the new version's call-graph node.
2203 and update the edges of the new node. */
2204 new_version_node =
2205 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2206 redirect_callers, bbs_to_copy);
b5d36404 2207
2208 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2209 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2210 bbs_to_copy, new_entry_block);
b5d36404 2211
a0c938f0 2212 /* Update the new version's properties.
e03a95e7 2213     Make the new version visible only within this translation unit.  Make sure
2214     that it is not weak either.
a0c938f0 2215 ??? We cannot use COMDAT linkage because there is no
b5d36404 2216 ABI support for this. */
6137cc9f 2217 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2218 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2219 new_version_node->local.externally_visible = 0;
2220 new_version_node->local.local = 1;
2221 new_version_node->lowered = true;
f014e39d 2222
e03a95e7 2223 /* Update the call_expr on the edges to call the new version node. */
2224 update_call_expr (new_version_node);
48e1416a 2225
50828ed8 2226 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2227 return new_version_node;
2228}
469679ab 2229
2230/* Produce a separate function body for inline clones so the offline copy can be
2231   modified without affecting them.  */
2232struct cgraph_node *
2233save_inline_function_body (struct cgraph_node *node)
2234{
ccf4ab6b 2235 struct cgraph_node *first_clone, *n;
469679ab 2236
2237 gcc_assert (node == cgraph_node (node->decl));
2238
2239 cgraph_lower_function (node);
2240
ccf4ab6b 2241 first_clone = node->clones;
469679ab 2242
2243 first_clone->decl = copy_node (node->decl);
469679ab 2244 cgraph_insert_node_to_hashtable (first_clone);
2245 gcc_assert (first_clone == cgraph_node (first_clone->decl));
ccf4ab6b 2246 if (first_clone->next_sibling_clone)
2247 {
2248 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2249 n->clone_of = first_clone;
2250 n->clone_of = first_clone;
2251 n->next_sibling_clone = first_clone->clones;
2252 if (first_clone->clones)
2253 first_clone->clones->prev_sibling_clone = n;
2254 first_clone->clones = first_clone->next_sibling_clone;
2255 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2256 first_clone->next_sibling_clone = NULL;
2257 gcc_assert (!first_clone->prev_sibling_clone);
2258 }
2259 first_clone->clone_of = NULL;
2260 node->clones = NULL;
2261
2262 if (first_clone->clones)
2263 for (n = first_clone->clones; n != first_clone;)
2264 {
2265 gcc_assert (n->decl == node->decl);
2266 n->decl = first_clone->decl;
2267 if (n->clones)
2268 n = n->clones;
2269 else if (n->next_sibling_clone)
2270 n = n->next_sibling_clone;
2271 else
2272 {
2273 while (n != first_clone && !n->next_sibling_clone)
2274 n = n->clone_of;
2275 if (n != first_clone)
2276 n = n->next_sibling_clone;
2277 }
2278 }
469679ab 2279
2280  /* Copy the function body of NODE to the new declaration of FIRST_CLONE.  */
b06ab5fa 2281 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2282 NULL, NULL);
469679ab 2283
2284 DECL_EXTERNAL (first_clone->decl) = 0;
ecd88073 2285 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
469679ab 2286 TREE_PUBLIC (first_clone->decl) = 0;
2287 DECL_COMDAT (first_clone->decl) = 0;
7fe9b425 2288 VEC_free (ipa_opt_pass, heap,
6d1cc52c 2289 first_clone->ipa_transforms_to_apply);
2290 first_clone->ipa_transforms_to_apply = NULL;
469679ab 2291
469679ab 2292#ifdef ENABLE_CHECKING
2293 verify_cgraph_node (first_clone);
2294#endif
2295 return first_clone;
2296}
a861fe52 2297
ccf4ab6b 2298/* Given a virtual clone, turn it into an actual clone.  */
2299static void
2300cgraph_materialize_clone (struct cgraph_node *node)
2301{
2302 bitmap_obstack_initialize (NULL);
e748b31d 2303#ifdef ENABLE_CHECKING
2304 node->former_clone_of = node->clone_of->decl;
2305 if (node->clone_of->former_clone_of)
2306 node->former_clone_of = node->clone_of->former_clone_of;
2307#endif
ccf4ab6b 2308  /* Copy the function body of the clone's origin to NODE.  */
2309 tree_function_versioning (node->clone_of->decl, node->decl,
2310 node->clone.tree_map, true,
b06ab5fa 2311 node->clone.args_to_skip, NULL, NULL);
e20422ea 2312 if (cgraph_dump_file)
2313 {
2314 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2315 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2316 }
ccf4ab6b 2317
2318  /* The function is no longer a clone.  */
2319 if (node->next_sibling_clone)
2320 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2321 if (node->prev_sibling_clone)
2322 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2323 else
2324 node->clone_of->clones = node->next_sibling_clone;
2325 node->next_sibling_clone = NULL;
2326 node->prev_sibling_clone = NULL;
6d1cc52c 2327 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2328 {
2329 cgraph_release_function_body (node->clone_of);
2330 cgraph_node_remove_callees (node->clone_of);
2331 ipa_remove_all_references (&node->clone_of->ref_list);
2332 }
ccf4ab6b 2333 node->clone_of = NULL;
2334 bitmap_obstack_release (NULL);
2335}
2336
c596d830 2337/* If necessary, change the function declaration in the call statement
2338 associated with E so that it corresponds to the edge callee. */
2339
2340gimple
2341cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2342{
2343 tree decl = gimple_call_fndecl (e->call_stmt);
2344 gimple new_stmt;
1f449108 2345#ifdef ENABLE_CHECKING
2346 struct cgraph_node *node;
2347#endif
c596d830 2348
2349 if (!decl || decl == e->callee->decl
2350 /* Don't update call from same body alias to the real function. */
2351 || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
2352 return e->call_stmt;
2353
1f449108 2354#ifdef ENABLE_CHECKING
2355 node = cgraph_get_node (decl);
2356 gcc_assert (!node || !node->clone.combined_args_to_skip);
2357#endif
e748b31d 2358
c596d830 2359 if (cgraph_dump_file)
2360 {
2361 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2362 cgraph_node_name (e->caller), e->caller->uid,
2363 cgraph_node_name (e->callee), e->callee->uid);
2364 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2365 if (e->callee->clone.combined_args_to_skip)
91aba934 2366 {
2367 fprintf (cgraph_dump_file, " combined args to skip: ");
2368 dump_bitmap (cgraph_dump_file,
2369 e->callee->clone.combined_args_to_skip);
e748b31d 2370 }
c596d830 2371 }
2372
2373 if (e->callee->clone.combined_args_to_skip)
91aba934 2374 {
2375 gimple_stmt_iterator gsi;
2376
2377 new_stmt
2378 = gimple_call_copy_skip_args (e->call_stmt,
2379 e->callee->clone.combined_args_to_skip);
2380
2381 if (gimple_vdef (new_stmt)
2382 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2383 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2384
2385 gsi = gsi_for_stmt (e->call_stmt);
2386 gsi_replace (&gsi, new_stmt, true);
2387 }
c596d830 2388 else
2389 new_stmt = e->call_stmt;
c596d830 2390
91aba934 2391 gimple_call_set_fndecl (new_stmt, e->callee->decl);
e32916b6 2392 update_stmt (new_stmt);
c596d830 2393
c596d830 2394 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2395
2396 if (cgraph_dump_file)
2397 {
2398 fprintf (cgraph_dump_file, " updated to:");
2399 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2400 }
2401 return new_stmt;
2402}
2403
ccf4ab6b 2404/* Once all functions from the compilation unit are in memory, produce all
c596d830 2405   clones and update all calls.  We might also do this on demand if we don't
2406   want to bring all functions to memory prior to compilation, but the current
2407   WHOPR implementation does that, and it is a bit easier to keep everything
2408   right in this order.  */
ccf4ab6b 2409void
2410cgraph_materialize_all_clones (void)
2411{
2412 struct cgraph_node *node;
2413 bool stabilized = false;
2414
2415 if (cgraph_dump_file)
2416 fprintf (cgraph_dump_file, "Materializing clones\n");
2417#ifdef ENABLE_CHECKING
2418 verify_cgraph ();
2419#endif
2420
2421  /* We could also use topological order, but the number of iterations should be
2422     bounded by the number of IPA passes, since a single IPA pass is probably not
2423     going to create clones of clones it created itself.  */
2424 while (!stabilized)
2425 {
2426 stabilized = true;
2427 for (node = cgraph_nodes; node; node = node->next)
2428 {
2429 if (node->clone_of && node->decl != node->clone_of->decl
2430 && !gimple_has_body_p (node->decl))
2431 {
2432 if (gimple_has_body_p (node->clone_of->decl))
2433 {
2434 if (cgraph_dump_file)
e20422ea 2435 {
2436		      fprintf (cgraph_dump_file, "cloning %s to %s\n",
2437 cgraph_node_name (node->clone_of),
2438 cgraph_node_name (node));
2439 if (node->clone.tree_map)
2440 {
2441 unsigned int i;
2442 fprintf (cgraph_dump_file, " replace map: ");
2443 for (i = 0; i < VEC_length (ipa_replace_map_p,
2444 node->clone.tree_map);
2445 i++)
2446 {
2447 struct ipa_replace_map *replace_info;
2448 replace_info = VEC_index (ipa_replace_map_p,
2449 node->clone.tree_map,
2450 i);
2451 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2452 fprintf (cgraph_dump_file, " -> ");
2453 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2454 fprintf (cgraph_dump_file, "%s%s;",
2455 replace_info->replace_p ? "(replace)":"",
2456 replace_info->ref_p ? "(ref)":"");
2457 }
2458 fprintf (cgraph_dump_file, "\n");
2459 }
2460 if (node->clone.args_to_skip)
2461 {
2462 fprintf (cgraph_dump_file, " args_to_skip: ");
2463 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2464 }
2465 if (node->clone.args_to_skip)
2466 {
2467 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2468 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2469 }
2470 }
ccf4ab6b 2471 cgraph_materialize_clone (node);
a510bd8d 2472 stabilized = false;
ccf4ab6b 2473 }
ccf4ab6b 2474 }
2475 }
2476 }
ee3f5fc0 2477 for (node = cgraph_nodes; node; node = node->next)
2478 if (!node->analyzed && node->callees)
2479 cgraph_node_remove_callees (node);
c596d830 2480 if (cgraph_dump_file)
2481    fprintf (cgraph_dump_file, "Materialization and call site updates done.\n");
947781ac 2482#ifdef ENABLE_CHECKING
2483 verify_cgraph ();
2484#endif
ccf4ab6b 2485 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2486}
2487
a861fe52 2488#include "gt-cgraphunit.h"