cd6bca02 1/* Callgraph based interprocedural optimizations.
028a99ef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well
cd6bca02 23 as a few basic interprocedural optimizers.
b0cdf642 24
 25 The main scope of this file is to act as an interface between the
 26 tree-based front ends and the back end (and middle end).
 27
 28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front end has parsed the whole body of the
 33 function and it is certain that neither the body nor the declaration will change.
34
b326746d 35 (There is one exception, needed for implementing the GCC extern inline
 36 extension.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is
 46 finalized and it will no longer change.
b0cdf642 47
6329636b 48 At this point the call-graph construction and local function
b0cdf642 49 analysis take place. Bodies of unreachable functions are released
 50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
 53 compilation units are combined (such as in the C front end).
b0cdf642 54
55 - cgraph_optimize
56
 57 In this unit-at-a-time compilation the intraprocedural analysis takes
 58 place here. In particular, static functions whose address is never
 59 taken are marked as local. The back end can then use this information to
 60 modify calling conventions, do better inlining, or perform similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
 66 data structure must be updated accordingly by this function.
 67 There should be little need to call this function; all references
 68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ front end to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
 73 This function is responsible for lowering tree nodes not understood by
 74 generic code into understandable ones or alternatively marking
 75 callgraph and varpool nodes referenced by them as needed.
76
 77 ??? On tree-ssa, genericizing should take place here and we would avoid the
 78 need for these hooks (replacing them by a genericizing hook).
79
6329636b 80 Analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
 83 In cgraph_finalize_compilation_unit the reachable functions are
 84 analyzed. During analysis the call-graph edges from reachable
 85 functions are constructed and their destinations are marked as
 86 reachable. References to functions and variables are discovered too,
 87 and variables found to be needed are output to the assembly file. Via
 88 the mark_referenced call in assemble_variable, functions referenced by
 89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence is
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
 93 to change a function from !reachable to reachable, and thus
 94 assemble_variable no longer calls mark_referenced.
95
 96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined and are not external are output.
98
 99 ??? It is possible that a reference to a function or variable is optimized
 100 out. We cannot deal with this nicely because the topological order is not
 101 suitable for it. For tree-ssa we may consider another pass doing
 102 optimization and re-discovering reachable functions.
103
 104 ??? Reorganize code so variables are output very last and only if they
 105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
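
/* An illustrative sketch of how a front end typically drives the entry points
   described above.  The more_input_p and parse_next_function helpers are
   hypothetical front-end routines, not functions defined anywhere in GCC;
   note that cgraph_optimize is invoked from cgraph_finalize_compilation_unit
   below, so a front end only needs to call the latter.

     while (more_input_p ())
       {
         tree fndecl = parse_next_function ();
         cgraph_finalize_function (fndecl, false);
       }
     cgraph_finalize_compilation_unit ();
*/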
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
bfec3452 146static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 147
ecb08119 148FILE *cgraph_dump_file;
121f3051 149
2c56f72e 150/* A vector of FUNCTION_DECLs declared as static constructors. */
151static GTY (()) VEC(tree, gc) *static_ctors;
152/* A vector of FUNCTION_DECLs declared as static destructors. */
153static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 154
28454517 155/* Used for vtable lookup in thunk adjusting. */
156static GTY (()) tree vtable_entry_type;
157
a861fe52 158/* When the target does not have ctors and dtors, we call all constructors
310d2511 159 and destructors via special initialization/destruction functions
48e1416a 160 recognized by collect2.
 161
a861fe52 162 When we are going to build these functions, collect all constructors and
 163 destructors and turn them into normal functions. */
164
165static void
166record_cdtor_fn (tree fndecl)
167{
2de29097 168 struct cgraph_node *node;
169 if (targetm.have_ctors_dtors
170 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
171 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 172 return;
173
174 if (DECL_STATIC_CONSTRUCTOR (fndecl))
175 {
2c56f72e 176 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 177 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 178 }
179 if (DECL_STATIC_DESTRUCTOR (fndecl))
180 {
2c56f72e 181 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 182 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 183 }
2de29097 184 node = cgraph_node (fndecl);
185 node->local.disregard_inline_limits = 1;
186 cgraph_mark_reachable_node (node);
a861fe52 187}
188
2c56f72e 189/* Define global constructor/destructor functions for the CDTORS, of
 190 which there are LEN. The CDTORS are sorted by initialization
 191 priority. If CTOR_P is true, these are constructors; otherwise,
 192 they are destructors. */
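
/* A sketch of the batching done below (the names and priorities are made-up
   examples): with CDTORS = { C (priority 200), A (priority 65535),
   B (priority 65535) }, already sorted by priority, the loop emits two
   wrapper functions via cgraph_build_static_cdtor:

     cgraph_build_static_cdtor ('I', <body calling C>, 200);
     cgraph_build_static_cdtor ('I', <body calling A, then B>, 65535);
*/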
193
a861fe52 194static void
2c56f72e 195build_cdtor (bool ctor_p, tree *cdtors, size_t len)
a861fe52 196{
2c56f72e 197 size_t i;
a861fe52 198
2c56f72e 199 i = 0;
200 while (i < len)
201 {
202 tree body;
203 tree fn;
204 priority_type priority;
205
206 priority = 0;
207 body = NULL_TREE;
208 /* Find the next batch of constructors/destructors with the same
209 initialization priority. */
210 do
211 {
212 priority_type p;
213 fn = cdtors[i];
214 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
215 if (!body)
216 priority = p;
217 else if (p != priority)
218 break;
389dd41b 219 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
220 fn, 0),
2c56f72e 221 &body);
222 ++i;
223 }
224 while (i < len);
225 gcc_assert (body != NULL_TREE);
226 /* Generate a function to call all the function of like
227 priority. */
228 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
229 }
230}
231
232/* Comparison function for qsort. P1 and P2 are actually of type
233 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
234 used to determine the sort order. */
a861fe52 235
2c56f72e 236static int
237compare_ctor (const void *p1, const void *p2)
238{
239 tree f1;
240 tree f2;
241 int priority1;
242 int priority2;
243
244 f1 = *(const tree *)p1;
245 f2 = *(const tree *)p2;
246 priority1 = DECL_INIT_PRIORITY (f1);
247 priority2 = DECL_INIT_PRIORITY (f2);
48e1416a 248
2c56f72e 249 if (priority1 < priority2)
250 return -1;
251 else if (priority1 > priority2)
252 return 1;
253 else
254 /* Ensure a stable sort. */
255 return (const tree *)p1 - (const tree *)p2;
256}
257
258/* Comparison function for qsort. P1 and P2 are actually of type
259 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
260 used to determine the sort order. */
a861fe52 261
2c56f72e 262static int
263compare_dtor (const void *p1, const void *p2)
264{
265 tree f1;
266 tree f2;
267 int priority1;
268 int priority2;
269
270 f1 = *(const tree *)p1;
271 f2 = *(const tree *)p2;
272 priority1 = DECL_FINI_PRIORITY (f1);
273 priority2 = DECL_FINI_PRIORITY (f2);
48e1416a 274
2c56f72e 275 if (priority1 < priority2)
276 return -1;
277 else if (priority1 > priority2)
278 return 1;
279 else
280 /* Ensure a stable sort. */
281 return (const tree *)p1 - (const tree *)p2;
a861fe52 282}
283
284/* Generate functions to call static constructors and destructors
285 for targets that do not support .ctors/.dtors sections. These
286 functions have magic names which are detected by collect2. */
287
288static void
289cgraph_build_cdtor_fns (void)
290{
2c56f72e 291 if (!VEC_empty (tree, static_ctors))
a861fe52 292 {
2c56f72e 293 gcc_assert (!targetm.have_ctors_dtors);
294 qsort (VEC_address (tree, static_ctors),
48e1416a 295 VEC_length (tree, static_ctors),
2c56f72e 296 sizeof (tree),
297 compare_ctor);
298 build_cdtor (/*ctor_p=*/true,
299 VEC_address (tree, static_ctors),
48e1416a 300 VEC_length (tree, static_ctors));
2c56f72e 301 VEC_truncate (tree, static_ctors, 0);
a861fe52 302 }
2c56f72e 303
304 if (!VEC_empty (tree, static_dtors))
a861fe52 305 {
2c56f72e 306 gcc_assert (!targetm.have_ctors_dtors);
307 qsort (VEC_address (tree, static_dtors),
48e1416a 308 VEC_length (tree, static_dtors),
2c56f72e 309 sizeof (tree),
310 compare_dtor);
311 build_cdtor (/*ctor_p=*/false,
312 VEC_address (tree, static_dtors),
48e1416a 313 VEC_length (tree, static_dtors));
2c56f72e 314 VEC_truncate (tree, static_dtors, 0);
a861fe52 315 }
316}
317
2c0b522d 318/* Determine if function DECL is needed. That is, visible to something
 319 outside this translation unit or to something magic in the system
6329636b 320 configury. */
2c0b522d 321
7bfefa9d 322bool
323cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 324{
3f82b628 325 /* If the user told us it is used, then it must be so. */
05806473 326 if (node->local.externally_visible)
327 return true;
328
3f82b628 329 /* ??? If the assembler name is set by hand, it is possible to assemble
330 the name later after finalizing the function and the fact is noticed
331 in assemble_name then. This is arguably a bug. */
332 if (DECL_ASSEMBLER_NAME_SET_P (decl)
333 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
334 return true;
335
55680bef 336 /* With -fkeep-inline-functions we are keeping all inline functions except
337 for extern inline ones. */
338 if (flag_keep_inline_functions
339 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 340 && !DECL_EXTERNAL (decl)
341 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 342 return true;
343
2c0b522d 344 /* If we decided it was needed before, but at the time we didn't have
345 the body of the function available, then it's still needed. We have
346 to go back and re-check its dependencies now. */
347 if (node->needed)
348 return true;
349
 350 /* Externally visible functions must be output. The exception is
a0c938f0 351 COMDAT functions that must be output only when they are needed.
8baa9d15 352
 353 When not optimizing, also output the static functions (see
95da6220 354 PR24561), but don't do so for always_inline functions, functions
0f9238c0 355 declared inline, and nested functions. These were optimized out
d3d410e1 356 in the original implementation and it is unclear whether we want
554f2707 357 to change the behavior here. */
bba7ddf8 358 if (((TREE_PUBLIC (decl)
0f9238c0 359 || (!optimize
360 && !node->local.disregard_inline_limits
d3d410e1 361 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 362 && !(DECL_CONTEXT (decl)
363 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 364 && !flag_whole_program
365 && !flag_lto
366 && !flag_whopr)
62eec3b4 367 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 368 return true;
369
370 /* Constructors and destructors are reachable from the runtime by
371 some mechanism. */
372 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
373 return true;
374
2c0b522d 375 return false;
376}
377
bdc40eb8 378/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
523c1122 379 functions to the callgraph so that they look like ordinary reachable
 380 functions inserted into the callgraph already at construction time. */
381
382bool
383cgraph_process_new_functions (void)
384{
385 bool output = false;
386 tree fndecl;
387 struct cgraph_node *node;
388
0cddb138 389 varpool_analyze_pending_decls ();
523c1122 390 /* Note that this queue may grow as it is being processed, as the new
 391 functions may generate new ones. */
392 while (cgraph_new_nodes)
393 {
394 node = cgraph_new_nodes;
395 fndecl = node->decl;
396 cgraph_new_nodes = cgraph_new_nodes->next_needed;
397 switch (cgraph_state)
398 {
399 case CGRAPH_STATE_CONSTRUCTION:
 400 /* At construction time we just need to finalize the function and move
 401 it into the reachable functions list. */
402
403 node->next_needed = NULL;
404 cgraph_finalize_function (fndecl, false);
405 cgraph_mark_reachable_node (node);
406 output = true;
407 break;
408
409 case CGRAPH_STATE_IPA:
f517b36e 410 case CGRAPH_STATE_IPA_SSA:
523c1122 411 /* When IPA optimization has already started, do all essential
 412 transformations that have already been performed on the whole
 413 cgraph but not on this function. */
414
75a70cf9 415 gimple_register_cfg_hooks ();
523c1122 416 if (!node->analyzed)
417 cgraph_analyze_function (node);
418 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
419 current_function_decl = fndecl;
9c1bff7a 420 compute_inline_parameters (node);
f517b36e 421 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
422 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
423 /* When not optimizing, be sure we run early local passes anyway
424 to expand OMP. */
425 || !optimize)
20099e35 426 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 427 free_dominance_info (CDI_POST_DOMINATORS);
428 free_dominance_info (CDI_DOMINATORS);
429 pop_cfun ();
430 current_function_decl = NULL;
431 break;
432
433 case CGRAPH_STATE_EXPANSION:
434 /* Functions created during expansion shall be compiled
435 directly. */
09fc9532 436 node->process = 0;
523c1122 437 cgraph_expand_function (node);
438 break;
439
440 default:
441 gcc_unreachable ();
442 break;
443 }
50828ed8 444 cgraph_call_function_insertion_hooks (node);
0cddb138 445 varpool_analyze_pending_decls ();
523c1122 446 }
447 return output;
448}
449
9b8fb23a 450/* As a GCC extension we allow redefinition of the function. The
 451 semantics when the two bodies differ are not well defined.
 452 We replace the old body with the new body, so in unit-at-a-time mode
 453 we always use the new body, while in normal mode we may end up with
 454 the old body inlined into some functions and the new body expanded and
 455 inlined in others.
 456
 457 ??? It may make more sense to use one body for inlining and the other
 458 body for expanding the function, but this is difficult to do. */
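/* A minimal illustration of such a redefinition (hypothetical user code using
   the GNU extern inline extension; the identifier "twice" is made up):

     extern inline int twice (int x) { return x + x; }
     int twice (int x) { return 2 * x; }

   The second definition replaces the first; cgraph_reset_node below throws
   away everything derived from the old body so the function can be analyzed
   again. */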
459
460static void
461cgraph_reset_node (struct cgraph_node *node)
462{
09fc9532 463 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 464 This is *not* testing for whether we've already emitted the function.
465 That case can be sort-of legitimately seen with real function redefinition
466 errors. I would argue that the front end should never present us with
467 such a case, but don't enforce that for now. */
09fc9532 468 gcc_assert (!node->process);
9b8fb23a 469
470 /* Reset our data structures so we can analyze the function again. */
471 memset (&node->local, 0, sizeof (node->local));
472 memset (&node->global, 0, sizeof (node->global));
473 memset (&node->rtl, 0, sizeof (node->rtl));
474 node->analyzed = false;
475 node->local.redefined_extern_inline = true;
476 node->local.finalized = false;
477
9b8fb23a 478 cgraph_node_remove_callees (node);
479
 480 /* We may need to re-queue the node for assembling in case
46beef9a 481 we already processed it and ignored it as not needed or got
 482 a re-declaration in IMA mode. */
483 if (node->reachable)
9b8fb23a 484 {
485 struct cgraph_node *n;
486
487 for (n = cgraph_nodes_queue; n; n = n->next_needed)
488 if (n == node)
489 break;
490 if (!n)
491 node->reachable = 0;
492 }
493}
c08871a9 494
1e8e9920 495static void
496cgraph_lower_function (struct cgraph_node *node)
497{
498 if (node->lowered)
499 return;
bfec3452 500
501 if (node->nested)
502 lower_nested_functions (node->decl);
503 gcc_assert (!node->nested);
504
1e8e9920 505 tree_lowering_passes (node->decl);
506 node->lowered = true;
507}
508
28df663b 509/* DECL has been parsed. Take it, queue it, compile it at the whim of the
510 logic in effect. If NESTED is true, then our caller cannot stand to have
511 the garbage collector run at the moment. We would need to either create
512 a new GC context, or just not compile right now. */
ae01b312 513
514void
28df663b 515cgraph_finalize_function (tree decl, bool nested)
ae01b312 516{
517 struct cgraph_node *node = cgraph_node (decl);
518
c08871a9 519 if (node->local.finalized)
9b8fb23a 520 cgraph_reset_node (node);
28df663b 521
167b550b 522 node->pid = cgraph_max_pid ++;
c08871a9 523 notice_global_symbol (decl);
79bb87b4 524 node->local.finalized = true;
e27482aa 525 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 526 node->finalized_by_frontend = true;
a861fe52 527 record_cdtor_fn (node->decl);
ae01b312 528
7bfefa9d 529 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 530 cgraph_mark_needed_node (node);
531
ecda6e51 532 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 533 level unit, we need to be conservative about possible entry points
534 there. */
62eec3b4 535 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 536 cgraph_mark_reachable_node (node);
537
2c0b522d 538 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 539 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 540 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 541
b69eb0ff 542 /* Possibly warn about unused parameters. */
543 if (warn_unused_parameter)
544 do_warn_unused_parameter (decl);
6329636b 545
546 if (!nested)
547 ggc_collect ();
ae01b312 548}
549
0da03d11 550/* The C99 extern inline keywords allow changing a declaration after the
 551 function has been finalized. We need to re-decide whether we want to mark
 552 the function as needed then. */
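/* For example (hypothetical user code, C99 inline semantics; the identifier
   "half" is made up):

     inline int half (int x) { return x / 2; }
     extern int half (int x);

   The later extern declaration requires an external definition of half in this
   translation unit, so the already finalized function has to be reconsidered
   and possibly marked as needed. */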
553
554void
555cgraph_mark_if_needed (tree decl)
556{
557 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 558 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 559 cgraph_mark_needed_node (node);
560}
561
f2d608d8 562#ifdef ENABLE_CHECKING
ccf4ab6b 563/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
564static bool
565clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
566{
567 while (node != node2 && node2)
568 node2 = node2->clone_of;
569 return node2 != NULL;
570}
f2d608d8 571#endif
ccf4ab6b 572
b0cdf642 573/* Verify the consistency of cgraph node NODE. */
4b987fac 574DEBUG_FUNCTION void
b0cdf642 575verify_cgraph_node (struct cgraph_node *node)
576{
577 struct cgraph_edge *e;
e27482aa 578 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
117ef3d7 579 struct function *saved_cfun = cfun;
e27482aa 580 basic_block this_block;
75a70cf9 581 gimple_stmt_iterator gsi;
9bfec7c2 582 bool error_found = false;
b0cdf642 583
852f689e 584 if (seen_error ())
bd09cd3e 585 return;
586
b0cdf642 587 timevar_push (TV_CGRAPH_VERIFY);
117ef3d7 588 /* debug_generic_stmt needs correct cfun */
589 set_cfun (this_cfun);
b0cdf642 590 for (e = node->callees; e; e = e->next_callee)
591 if (e->aux)
592 {
0a81f5a0 593 error ("aux field set for edge %s->%s",
abd3e6b5 594 identifier_to_locale (cgraph_node_name (e->caller)),
595 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 596 error_found = true;
597 }
a2cb9b3b 598 if (node->count < 0)
599 {
600 error ("Execution count is negative");
601 error_found = true;
602 }
59dd4830 603 if (node->global.inlined_to && node->local.externally_visible)
604 {
605 error ("Externally visible inline clone");
606 error_found = true;
607 }
608 if (node->global.inlined_to && node->address_taken)
609 {
610 error ("Inline clone with address taken");
611 error_found = true;
612 }
613 if (node->global.inlined_to && node->needed)
614 {
615 error ("Inline clone is needed");
616 error_found = true;
617 }
799c8711 618 for (e = node->indirect_calls; e; e = e->next_callee)
619 {
620 if (e->aux)
621 {
622 error ("aux field set for indirect edge from %s",
623 identifier_to_locale (cgraph_node_name (e->caller)));
624 error_found = true;
625 }
626 if (!e->indirect_unknown_callee
627 || !e->indirect_info)
628 {
629 error ("An indirect edge from %s is not marked as indirect or has "
630 "associated indirect_info, the corresponding statement is: ",
631 identifier_to_locale (cgraph_node_name (e->caller)));
632 debug_gimple_stmt (e->call_stmt);
633 error_found = true;
634 }
635 }
b0cdf642 636 for (e = node->callers; e; e = e->next_caller)
637 {
a2cb9b3b 638 if (e->count < 0)
639 {
640 error ("caller edge count is negative");
641 error_found = true;
642 }
4ae20857 643 if (e->frequency < 0)
644 {
645 error ("caller edge frequency is negative");
646 error_found = true;
647 }
648 if (e->frequency > CGRAPH_FREQ_MAX)
649 {
650 error ("caller edge frequency is too large");
651 error_found = true;
652 }
1c094d2f 653 if (gimple_has_body_p (e->caller->decl)
654 && !e->caller->global.inlined_to
655 && (e->frequency
656 != compute_call_stmt_bb_frequency (e->caller->decl,
657 gimple_bb (e->call_stmt))))
658 {
659 error ("caller edge frequency %i does not match BB freqency %i",
660 e->frequency,
661 compute_call_stmt_bb_frequency (e->caller->decl,
662 gimple_bb (e->call_stmt)));
663 error_found = true;
664 }
b0cdf642 665 if (!e->inline_failed)
666 {
667 if (node->global.inlined_to
668 != (e->caller->global.inlined_to
669 ? e->caller->global.inlined_to : e->caller))
670 {
0a81f5a0 671 error ("inlined_to pointer is wrong");
b0cdf642 672 error_found = true;
673 }
674 if (node->callers->next_caller)
675 {
0a81f5a0 676 error ("multiple inline callers");
b0cdf642 677 error_found = true;
678 }
679 }
680 else
681 if (node->global.inlined_to)
682 {
0a81f5a0 683 error ("inlined_to pointer set for noninline callers");
b0cdf642 684 error_found = true;
685 }
686 }
687 if (!node->callers && node->global.inlined_to)
688 {
5cd75817 689 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 690 error_found = true;
691 }
692 if (node->global.inlined_to == node)
693 {
0a81f5a0 694 error ("inlined_to pointer refers to itself");
b0cdf642 695 error_found = true;
696 }
697
0f6439b9 698 if (!cgraph_node (node->decl))
b0cdf642 699 {
0f6439b9 700 error ("node not found in cgraph_hash");
b0cdf642 701 error_found = true;
702 }
a0c938f0 703
ccf4ab6b 704 if (node->clone_of)
705 {
706 struct cgraph_node *n;
707 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
708 if (n == node)
709 break;
710 if (!n)
711 {
712 error ("node has wrong clone_of");
713 error_found = true;
714 }
715 }
716 if (node->clones)
717 {
718 struct cgraph_node *n;
719 for (n = node->clones; n; n = n->next_sibling_clone)
720 if (n->clone_of != node)
721 break;
722 if (n)
723 {
724 error ("node has wrong clone list");
725 error_found = true;
726 }
727 }
728 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
729 {
730 error ("node is in clone list but it is not clone");
731 error_found = true;
732 }
733 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
734 {
735 error ("node has wrong prev_clone pointer");
736 error_found = true;
737 }
738 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
739 {
740 error ("double linked list of clones corrupted");
741 error_found = true;
742 }
c524ac5d 743 if (node->same_comdat_group)
744 {
745 struct cgraph_node *n = node->same_comdat_group;
746
747 if (!DECL_ONE_ONLY (node->decl))
748 {
749 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
750 error_found = true;
751 }
752 if (n == node)
753 {
754 error ("node is alone in a comdat group");
755 error_found = true;
756 }
757 do
758 {
759 if (!n->same_comdat_group)
760 {
761 error ("same_comdat_group is not a circular list");
762 error_found = true;
763 break;
764 }
765 n = n->same_comdat_group;
766 }
767 while (n != node);
768 }
ccf4ab6b 769
770 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 771 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 772 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
773 && !flag_wpa)
b0cdf642 774 {
e27482aa 775 if (this_cfun->cfg)
776 {
777 /* The nodes we're interested in are never shared, so walk
778 the tree ignoring duplicates. */
e7c352d1 779 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 780 /* Reach the trees by walking over the CFG, and note the
781 enclosing basic-blocks in the call edges. */
782 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 783 for (gsi = gsi_start_bb (this_block);
784 !gsi_end_p (gsi);
785 gsi_next (&gsi))
9bfec7c2 786 {
75a70cf9 787 gimple stmt = gsi_stmt (gsi);
799c8711 788 if (is_gimple_call (stmt))
9bfec7c2 789 {
790 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 791 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 792 if (e)
793 {
794 if (e->aux)
795 {
0a81f5a0 796 error ("shared call_stmt:");
75a70cf9 797 debug_gimple_stmt (stmt);
9bfec7c2 798 error_found = true;
799 }
799c8711 800 if (!e->indirect_unknown_callee)
28454517 801 {
799c8711 802 if (e->callee->same_body_alias)
803 {
804 error ("edge points to same body alias:");
805 debug_tree (e->callee->decl);
806 error_found = true;
807 }
e748b31d 808#ifdef ENABLE_CHECKING
809 else if (!e->callee->global.inlined_to
799c8711 810 && decl
e748b31d 811 && cgraph_get_node (decl)
812 && (e->callee->former_clone_of
813 != cgraph_get_node (decl)->decl)
799c8711 814 && !clone_of_p (cgraph_node (decl),
815 e->callee))
816 {
817 error ("edge points to wrong declaration:");
818 debug_tree (e->callee->decl);
819 fprintf (stderr," Instead of:");
820 debug_tree (decl);
821 error_found = true;
822 }
e748b31d 823#endif
28454517 824 }
799c8711 825 else if (decl)
9bfec7c2 826 {
799c8711 827 error ("an indirect edge with unknown callee "
828 "corresponding to a call_stmt with "
829 "a known declaration:");
ee3f5fc0 830 error_found = true;
799c8711 831 debug_gimple_stmt (e->call_stmt);
9bfec7c2 832 }
833 e->aux = (void *)1;
834 }
799c8711 835 else if (decl)
9bfec7c2 836 {
0a81f5a0 837 error ("missing callgraph edge for call stmt:");
75a70cf9 838 debug_gimple_stmt (stmt);
9bfec7c2 839 error_found = true;
840 }
841 }
842 }
e27482aa 843 pointer_set_destroy (visited_nodes);
e27482aa 844 }
845 else
846 /* No CFG available?! */
847 gcc_unreachable ();
848
b0cdf642 849 for (e = node->callees; e; e = e->next_callee)
850 {
799c8711 851 if (!e->aux)
b0cdf642 852 {
0a81f5a0 853 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 854 identifier_to_locale (cgraph_node_name (e->caller)),
855 identifier_to_locale (cgraph_node_name (e->callee)));
75a70cf9 856 debug_gimple_stmt (e->call_stmt);
b0cdf642 857 error_found = true;
858 }
859 e->aux = 0;
860 }
799c8711 861 for (e = node->indirect_calls; e; e = e->next_callee)
862 {
863 if (!e->aux)
864 {
865 error ("an indirect edge from %s has no corresponding call_stmt",
866 identifier_to_locale (cgraph_node_name (e->caller)));
867 debug_gimple_stmt (e->call_stmt);
868 error_found = true;
869 }
870 e->aux = 0;
871 }
b0cdf642 872 }
873 if (error_found)
874 {
875 dump_cgraph_node (stderr, node);
0a81f5a0 876 internal_error ("verify_cgraph_node failed");
b0cdf642 877 }
117ef3d7 878 set_cfun (saved_cfun);
b0cdf642 879 timevar_pop (TV_CGRAPH_VERIFY);
880}
881
882/* Verify whole cgraph structure. */
4b987fac 883DEBUG_FUNCTION void
b0cdf642 884verify_cgraph (void)
885{
886 struct cgraph_node *node;
887
852f689e 888 if (seen_error ())
8ec2a798 889 return;
890
b0cdf642 891 for (node = cgraph_nodes; node; node = node->next)
892 verify_cgraph_node (node);
893}
894
56af936e 895/* Output all asm statements we have stored up to be output. */
896
897static void
898cgraph_output_pending_asms (void)
899{
900 struct cgraph_asm_node *can;
901
852f689e 902 if (seen_error ())
56af936e 903 return;
904
905 for (can = cgraph_asm_nodes; can; can = can->next)
906 assemble_asm (can->asm_str);
907 cgraph_asm_nodes = NULL;
908}
909
0785e435 910/* Analyze the function scheduled to be output. */
bfec3452 911static void
0785e435 912cgraph_analyze_function (struct cgraph_node *node)
913{
bfec3452 914 tree save = current_function_decl;
0785e435 915 tree decl = node->decl;
916
ec1e35b2 917 current_function_decl = decl;
e27482aa 918 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 919
6816d0c4 920 assign_assembler_name_if_neeeded (node->decl);
921
bfec3452 922 /* Make sure to gimplify bodies only once. During analyzing a
923 function we lower it, which will require gimplified nested
924 functions, so we can end up here with an already gimplified
925 body. */
926 if (!gimple_body (decl))
927 gimplify_function_tree (decl);
928 dump_function (TDI_generic, decl);
929
e27482aa 930 cgraph_lower_function (node);
6e8d6e86 931 node->analyzed = true;
0785e435 932
e27482aa 933 pop_cfun ();
bfec3452 934 current_function_decl = save;
0785e435 935}
936
05806473 937/* Look for externally_visible and used attributes and mark cgraph nodes
938 accordingly.
939
940 We cannot mark the nodes at the point the attributes are processed (in
941 handle_*_attribute) because the copy of the declarations available at that
942 point may not be canonical. For example, in:
943
944 void f();
945 void f() __attribute__((used));
946
947 the declaration we see in handle_used_attribute will be the second
948 declaration -- but the front end will subsequently merge that declaration
949 with the original declaration and discard the second declaration.
950
951 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
952
953 void f() {}
954 void f() __attribute__((externally_visible));
955
956 is valid.
957
958 So, we walk the nodes at the end of the translation unit, applying the
959 attributes at that point. */
960
961static void
962process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 963 struct varpool_node *first_var)
05806473 964{
965 struct cgraph_node *node;
1d416bd7 966 struct varpool_node *vnode;
05806473 967
968 for (node = cgraph_nodes; node != first; node = node->next)
969 {
970 tree decl = node->decl;
83a23b05 971 if (DECL_PRESERVE_P (decl))
0b49f8f8 972 cgraph_mark_needed_node (node);
05806473 973 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
974 {
ba12ea31 975 if (! TREE_PUBLIC (node->decl))
712d2297 976 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
977 "%<externally_visible%>"
978 " attribute have effect only on public objects");
59dd4830 979 else if (node->local.finalized)
980 cgraph_mark_needed_node (node);
05806473 981 }
982 }
1d416bd7 983 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 984 {
985 tree decl = vnode->decl;
83a23b05 986 if (DECL_PRESERVE_P (decl))
05806473 987 {
22671757 988 vnode->force_output = true;
05806473 989 if (vnode->finalized)
1d416bd7 990 varpool_mark_needed_node (vnode);
05806473 991 }
992 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
993 {
ba12ea31 994 if (! TREE_PUBLIC (vnode->decl))
712d2297 995 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
996 "%<externally_visible%>"
997 " attribute have effect only on public objects");
59dd4830 998 else if (vnode->finalized)
999 varpool_mark_needed_node (vnode);
05806473 1000 }
1001 }
1002}
1003
aeeb194b 1004/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
 1005 each reachable function) and build the cgraph.
 1006 The function can be called multiple times after inserting new nodes
0d424440 1007 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 1008
aeeb194b 1009static void
1010cgraph_analyze_functions (void)
ae01b312 1011{
c1dcd13c 1012 /* Keep track of already processed nodes when called multiple times for
06b27565 1013 intermodule optimization. */
c1dcd13c 1014 static struct cgraph_node *first_analyzed;
c17d0de1 1015 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 1016 static struct varpool_node *first_analyzed_var;
aeeb194b 1017 struct cgraph_node *node, *next;
ae01b312 1018
c17d0de1 1019 process_function_and_variable_attributes (first_processed,
1020 first_analyzed_var);
1021 first_processed = cgraph_nodes;
1d416bd7 1022 first_analyzed_var = varpool_nodes;
1023 varpool_analyze_pending_decls ();
f79b6507 1024 if (cgraph_dump_file)
ae01b312 1025 {
e4200070 1026 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 1027 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1028 if (node->needed)
f79b6507 1029 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1030 fprintf (cgraph_dump_file, "\n");
ae01b312 1031 }
aeeb194b 1032 cgraph_process_new_functions ();
ae01b312 1033
e6d2b2d8 1034 /* Propagate reachability flag and lower representation of all reachable
1035 functions. In the future, lowering will introduce new functions and
1036 new entry points on the way (by template instantiation and virtual
1037 method table generation for instance). */
3d7bfc56 1038 while (cgraph_nodes_queue)
ae01b312 1039 {
0785e435 1040 struct cgraph_edge *edge;
3d7bfc56 1041 tree decl = cgraph_nodes_queue->decl;
1042
1043 node = cgraph_nodes_queue;
d87976fb 1044 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 1045 node->next_needed = NULL;
ae01b312 1046
638531ad 1047 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 1048 weak alias attribute to kill its body. See
638531ad 1049 gcc.c-torture/compile/20011119-1.c */
75a70cf9 1050 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 1051 {
1052 cgraph_reset_node (node);
1053 continue;
1054 }
638531ad 1055
7bfefa9d 1056 if (!node->analyzed)
1057 cgraph_analyze_function (node);
2c0b522d 1058
ae01b312 1059 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1060 if (!edge->callee->reachable)
2c0b522d 1061 cgraph_mark_reachable_node (edge->callee);
1062
61c2c7b1 1063 if (node->same_comdat_group)
1064 {
1065 for (next = node->same_comdat_group;
1066 next != node;
1067 next = next->same_comdat_group)
1068 cgraph_mark_reachable_node (next);
1069 }
1070
d544ceff 1071 /* If decl is a clone of an abstract function, mark that abstract
1072 function so that we don't release its body. The DECL_INITIAL() of that
1073 abstract function declaration will be later needed to output debug info. */
1074 if (DECL_ABSTRACT_ORIGIN (decl))
1075 {
1076 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1077 origin_node->abstract_and_needed = true;
1078 }
1079
c17d0de1 1080 /* We finalize local static variables while constructing callgraph
 1081 edges. Process their attributes too. */
1082 process_function_and_variable_attributes (first_processed,
1083 first_analyzed_var);
1084 first_processed = cgraph_nodes;
1d416bd7 1085 first_analyzed_var = varpool_nodes;
1086 varpool_analyze_pending_decls ();
aeeb194b 1087 cgraph_process_new_functions ();
ae01b312 1088 }
2c0b522d 1089
aa5e06c7 1090 /* Collect entry points to the unit. */
f79b6507 1091 if (cgraph_dump_file)
3d7bfc56 1092 {
e4200070 1093 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1094 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1095 if (node->needed)
f79b6507 1096 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1097 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1098 dump_cgraph (cgraph_dump_file);
3d7bfc56 1099 }
e6d2b2d8 1100
f79b6507 1101 if (cgraph_dump_file)
1102 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1103
f4ec5ce1 1104 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1105 {
1106 tree decl = node->decl;
f4ec5ce1 1107 next = node->next;
ae01b312 1108
1a1a827a 1109 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 1110 cgraph_reset_node (node);
9b8fb23a 1111
1a1a827a 1112 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 1113 {
f79b6507 1114 if (cgraph_dump_file)
1115 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1116 cgraph_remove_node (node);
9b8fb23a 1117 continue;
ae01b312 1118 }
bc5cab3b 1119 else
1120 node->next_needed = NULL;
1a1a827a 1121 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 1122 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1123 }
f79b6507 1124 if (cgraph_dump_file)
e4200070 1125 {
1126 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1127 dump_cgraph (cgraph_dump_file);
1128 }
c1dcd13c 1129 first_analyzed = cgraph_nodes;
ae01b312 1130 ggc_collect ();
aeeb194b 1131}
1132
8f69fd82 1133
aeeb194b 1134/* Analyze the whole compilation unit once it is parsed completely. */
1135
1136void
1137cgraph_finalize_compilation_unit (void)
1138{
9929334e 1139 timevar_push (TV_CGRAPH);
1140
bfec3452 1141 /* Do not skip analyzing the functions if there were errors; we would
 1142 miss diagnostics for the following functions otherwise. */
aeeb194b 1143
8f69fd82 1144 /* Emit size functions we didn't inline. */
4189e677 1145 finalize_size_functions ();
8f69fd82 1146
8f69fd82 1147 /* Call functions declared with the "constructor" or "destructor"
1148 attribute. */
1149 cgraph_build_cdtor_fns ();
aeeb194b 1150
9929334e 1151 /* Mark alias targets necessary and emit diagnostics. */
1152 finish_aliases_1 ();
1153
aeeb194b 1154 if (!quiet_flag)
1155 {
1156 fprintf (stderr, "\nAnalyzing compilation unit\n");
1157 fflush (stderr);
1158 }
1159
9929334e 1160 /* Gimplify and lower all functions, compute reachability and
1161 remove unreachable nodes. */
1162 cgraph_analyze_functions ();
1163
8f69fd82 1164 /* Mark alias targets necessary and emit diagnostics. */
1165 finish_aliases_1 ();
1166
9929334e 1167 /* Gimplify and lower thunks. */
aeeb194b 1168 cgraph_analyze_functions ();
bfec3452 1169
9929334e 1170 /* Finally drive the pass manager. */
bfec3452 1171 cgraph_optimize ();
9929334e 1172
1173 timevar_pop (TV_CGRAPH);
ae01b312 1174}
9ed5b1f5 1175
1176
ae01b312 1177/* Figure out what functions we want to assemble. */
1178
1179static void
d9d9733a 1180cgraph_mark_functions_to_output (void)
ae01b312 1181{
1182 struct cgraph_node *node;
61c2c7b1 1183#ifdef ENABLE_CHECKING
1184 bool check_same_comdat_groups = false;
1185
1186 for (node = cgraph_nodes; node; node = node->next)
1187 gcc_assert (!node->process);
1188#endif
ae01b312 1189
ae01b312 1190 for (node = cgraph_nodes; node; node = node->next)
1191 {
1192 tree decl = node->decl;
d7c6d889 1193 struct cgraph_edge *e;
a0c938f0 1194
61c2c7b1 1195 gcc_assert (!node->process || node->same_comdat_group);
1196 if (node->process)
1197 continue;
d7c6d889 1198
1199 for (e = node->callers; e; e = e->next_caller)
611e5405 1200 if (e->inline_failed)
d7c6d889 1201 break;
ae01b312 1202
e6d2b2d8 1203 /* We need to output all local functions that are used and not
1204 always inlined, as well as those that are reachable from
1205 outside the current compilation unit. */
1a1a827a 1206 if (node->analyzed
b0cdf642 1207 && !node->global.inlined_to
08843223 1208 && (node->needed || node->reachable_from_other_partition
cdedc740 1209 || node->address_taken
d7c6d889 1210 || (e && node->reachable))
4ee9c684 1211 && !TREE_ASM_WRITTEN (decl)
ae01b312 1212 && !DECL_EXTERNAL (decl))
61c2c7b1 1213 {
1214 node->process = 1;
1215 if (node->same_comdat_group)
1216 {
1217 struct cgraph_node *next;
1218 for (next = node->same_comdat_group;
1219 next != node;
1220 next = next->same_comdat_group)
1221 next->process = 1;
1222 }
1223 }
1224 else if (node->same_comdat_group)
1225 {
1226#ifdef ENABLE_CHECKING
1227 check_same_comdat_groups = true;
1228#endif
1229 }
cc636d56 1230 else
9cee7c3f 1231 {
1232 /* We should've reclaimed all functions that are not needed. */
1233#ifdef ENABLE_CHECKING
75a70cf9 1234 if (!node->global.inlined_to
1a1a827a 1235 && gimple_has_body_p (decl)
08843223 1236 /* FIXME: in an ltrans unit, when the offline copy is outside a partition
 1237 but inline copies are inside a partition, we can end up not removing the
 1238 body since we no longer have an analyzed node pointing to it. */
1239 && !node->in_other_partition
9cee7c3f 1240 && !DECL_EXTERNAL (decl))
1241 {
1242 dump_cgraph_node (stderr, node);
1243 internal_error ("failed to reclaim unneeded function");
1244 }
1245#endif
75a70cf9 1246 gcc_assert (node->global.inlined_to
1a1a827a 1247 || !gimple_has_body_p (decl)
08843223 1248 || node->in_other_partition
9cee7c3f 1249 || DECL_EXTERNAL (decl));
1250
1251 }
a0c938f0 1252
961e3b13 1253 }
61c2c7b1 1254#ifdef ENABLE_CHECKING
1255 if (check_same_comdat_groups)
1256 for (node = cgraph_nodes; node; node = node->next)
1257 if (node->same_comdat_group && !node->process)
1258 {
1259 tree decl = node->decl;
1260 if (!node->global.inlined_to
1261 && gimple_has_body_p (decl)
08843223 1262 /* FIXME: in an ltrans unit, when the offline copy is outside a partition
 1263 but inline copies are inside a partition, we can end up not removing the
 1264 body since we no longer have an analyzed node pointing to it. */
1265 && !node->in_other_partition
61c2c7b1 1266 && !DECL_EXTERNAL (decl))
1267 {
1268 dump_cgraph_node (stderr, node);
1269 internal_error ("failed to reclaim unneeded function");
1270 }
1271 }
1272#endif
961e3b13 1273}
1274
28454517 1275/* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
 1276 in lowered GIMPLE form.
 1277
 1278 Set current_function_decl and cfun to the newly constructed empty function body.
 1279 Return the basic block in the function body. */
1280
1281static basic_block
1282init_lowered_empty_function (tree decl)
1283{
1284 basic_block bb;
1285
1286 current_function_decl = decl;
1287 allocate_struct_function (decl, false);
1288 gimple_register_cfg_hooks ();
1289 init_empty_tree_cfg ();
1290 init_tree_ssa (cfun);
1291 init_ssa_operands ();
1292 cfun->gimple_df->in_ssa_p = true;
1293 DECL_INITIAL (decl) = make_node (BLOCK);
1294
1295 DECL_SAVED_TREE (decl) = error_mark_node;
1296 cfun->curr_properties |=
1297 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1298 PROP_ssa);
1299
1300 /* Create BB for body of the function and connect it properly. */
1301 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1302 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1303 make_edge (bb, EXIT_BLOCK_PTR, 0);
1304
1305 return bb;
1306}
1307
1308/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1309 offset indicated by VIRTUAL_OFFSET, if that is
1310 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1311 zero for a result adjusting thunk. */
1312
1313static tree
1314thunk_adjust (gimple_stmt_iterator * bsi,
1315 tree ptr, bool this_adjusting,
1316 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1317{
1318 gimple stmt;
1319 tree ret;
1320
55d6cb23 1321 if (this_adjusting
1322 && fixed_offset != 0)
28454517 1323 {
1324 stmt = gimple_build_assign (ptr,
1325 fold_build2_loc (input_location,
1326 POINTER_PLUS_EXPR,
1327 TREE_TYPE (ptr), ptr,
1328 size_int (fixed_offset)));
1329 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1330 }
1331
1332 /* If there's a virtual offset, look up that value in the vtable and
1333 adjust the pointer again. */
1334 if (virtual_offset)
1335 {
1336 tree vtabletmp;
1337 tree vtabletmp2;
1338 tree vtabletmp3;
1339 tree offsettmp;
1340
1341 if (!vtable_entry_type)
1342 {
1343 tree vfunc_type = make_node (FUNCTION_TYPE);
1344 TREE_TYPE (vfunc_type) = integer_type_node;
1345 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1346 layout_type (vfunc_type);
1347
1348 vtable_entry_type = build_pointer_type (vfunc_type);
1349 }
1350
1351 vtabletmp =
1352 create_tmp_var (build_pointer_type
1353 (build_pointer_type (vtable_entry_type)), "vptr");
1354
1355 /* The vptr is always at offset zero in the object. */
1356 stmt = gimple_build_assign (vtabletmp,
1357 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1358 ptr));
1359 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1360 mark_symbols_for_renaming (stmt);
1361 find_referenced_vars_in (stmt);
1362
1363 /* Form the vtable address. */
1364 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1365 "vtableaddr");
1366 stmt = gimple_build_assign (vtabletmp2,
1367 build1 (INDIRECT_REF,
1368 TREE_TYPE (vtabletmp2), vtabletmp));
1369 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1370 mark_symbols_for_renaming (stmt);
1371 find_referenced_vars_in (stmt);
1372
1373 /* Find the entry with the vcall offset. */
1374 stmt = gimple_build_assign (vtabletmp2,
1375 fold_build2_loc (input_location,
1376 POINTER_PLUS_EXPR,
1377 TREE_TYPE (vtabletmp2),
1378 vtabletmp2,
1379 fold_convert (sizetype,
1380 virtual_offset)));
1381 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1382
1383 /* Get the offset itself. */
1384 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1385 "vcalloffset");
1386 stmt = gimple_build_assign (vtabletmp3,
1387 build1 (INDIRECT_REF,
1388 TREE_TYPE (vtabletmp3),
1389 vtabletmp2));
1390 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1391 mark_symbols_for_renaming (stmt);
1392 find_referenced_vars_in (stmt);
1393
1394 /* Cast to sizetype. */
1395 offsettmp = create_tmp_var (sizetype, "offset");
1396 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1397 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1398 mark_symbols_for_renaming (stmt);
1399 find_referenced_vars_in (stmt);
1400
1401 /* Adjust the `this' pointer. */
1402 ptr = fold_build2_loc (input_location,
1403 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1404 offsettmp);
1405 }
1406
55d6cb23 1407 if (!this_adjusting
1408 && fixed_offset != 0)
28454517 1409 /* Adjust the pointer by the constant. */
1410 {
1411 tree ptrtmp;
1412
1413 if (TREE_CODE (ptr) == VAR_DECL)
1414 ptrtmp = ptr;
1415 else
1416 {
1417 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1418 stmt = gimple_build_assign (ptrtmp, ptr);
1419 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1420 mark_symbols_for_renaming (stmt);
1421 find_referenced_vars_in (stmt);
1422 }
1423 ptr = fold_build2_loc (input_location,
1424 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1425 size_int (fixed_offset));
1426 }
1427
1428 /* Emit the statement and gimplify the adjustment expression. */
1429 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1430 stmt = gimple_build_assign (ret, ptr);
1431 mark_symbols_for_renaming (stmt);
1432 find_referenced_vars_in (stmt);
1433 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1434
1435 return ret;
1436}
1437
1438/* Produce assembler for thunk NODE. */
1439
1440static void
1441assemble_thunk (struct cgraph_node *node)
1442{
1443 bool this_adjusting = node->thunk.this_adjusting;
1444 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1445 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1446 tree virtual_offset = NULL;
1447 tree alias = node->thunk.alias;
1448 tree thunk_fndecl = node->decl;
1449 tree a = DECL_ARGUMENTS (thunk_fndecl);
1450
1451 current_function_decl = thunk_fndecl;
1452
1453 if (this_adjusting
1454 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1455 virtual_value, alias))
1456 {
1457 const char *fnname;
1458 tree fn_block;
1459
1460 DECL_RESULT (thunk_fndecl)
1461 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1462 RESULT_DECL, 0, integer_type_node);
22ea3b47 1463 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1464
1465 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1466 create one. */
1467 fn_block = make_node (BLOCK);
1468 BLOCK_VARS (fn_block) = a;
1469 DECL_INITIAL (thunk_fndecl) = fn_block;
1470 init_function_start (thunk_fndecl);
1471 cfun->is_thunk = 1;
1472 assemble_start_function (thunk_fndecl, fnname);
1473
1474 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1475 fixed_offset, virtual_value, alias);
1476
1477 assemble_end_function (thunk_fndecl, fnname);
1478 init_insn_lengths ();
1479 free_after_compilation (cfun);
1480 set_cfun (NULL);
1481 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1482 }
1483 else
1484 {
1485 tree restype;
1486 basic_block bb, then_bb, else_bb, return_bb;
1487 gimple_stmt_iterator bsi;
1488 int nargs = 0;
1489 tree arg;
1490 int i;
1491 tree resdecl;
1492 tree restmp = NULL;
1493 VEC(tree, heap) *vargs;
1494
1495 gimple call;
1496 gimple ret;
1497
1498 DECL_IGNORED_P (thunk_fndecl) = 1;
1499 bitmap_obstack_initialize (NULL);
1500
1501 if (node->thunk.virtual_offset_p)
1502 virtual_offset = size_int (virtual_value);
1503
1504 /* Build the return declaration for the function. */
1505 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1506 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1507 {
1508 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1509 DECL_ARTIFICIAL (resdecl) = 1;
1510 DECL_IGNORED_P (resdecl) = 1;
1511 DECL_RESULT (thunk_fndecl) = resdecl;
1512 }
1513 else
1514 resdecl = DECL_RESULT (thunk_fndecl);
1515
1516 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1517
1518 bsi = gsi_start_bb (bb);
1519
1520 /* Build call to the function being thunked. */
1521 if (!VOID_TYPE_P (restype))
1522 {
1523 if (!is_gimple_reg_type (restype))
1524 {
1525 restmp = resdecl;
1526 cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
1527 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1528 }
1529 else
1530 restmp = create_tmp_var_raw (restype, "retval");
1531 }
1532
1533 for (arg = a; arg; arg = TREE_CHAIN (arg))
1534 nargs++;
1535 vargs = VEC_alloc (tree, heap, nargs);
1536 if (this_adjusting)
1537 VEC_quick_push (tree, vargs,
1538 thunk_adjust (&bsi,
1539 a, 1, fixed_offset,
1540 virtual_offset));
1541 else
1542 VEC_quick_push (tree, vargs, a);
1543 for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
1544 VEC_quick_push (tree, vargs, arg);
1545 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1546 VEC_free (tree, heap, vargs);
1547 gimple_call_set_cannot_inline (call, true);
1548 gimple_call_set_from_thunk (call, true);
1549 if (restmp)
1550 gimple_call_set_lhs (call, restmp);
1551 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1552 mark_symbols_for_renaming (call);
1553 find_referenced_vars_in (call);
1554 update_stmt (call);
1555
1556 if (restmp && !this_adjusting)
1557 {
57ab8ec3 1558 tree true_label = NULL_TREE;
28454517 1559
1560 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1561 {
1562 gimple stmt;
1563 /* If the return type is a pointer, we need to
1564 protect against NULL. We know there will be an
1565 adjustment, because that's why we're emitting a
1566 thunk. */
1567 then_bb = create_basic_block (NULL, (void *) 0, bb);
1568 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1569 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1570 remove_edge (single_succ_edge (bb));
1571 true_label = gimple_block_label (then_bb);
28454517 1572 stmt = gimple_build_cond (NE_EXPR, restmp,
1573 fold_convert (TREE_TYPE (restmp),
1574 integer_zero_node),
1575 NULL_TREE, NULL_TREE);
1576 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1577 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1578 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1579 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1580 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1581 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1582 bsi = gsi_last_bb (then_bb);
1583 }
1584
1585 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1586 fixed_offset, virtual_offset);
1587 if (true_label)
1588 {
1589 gimple stmt;
1590 bsi = gsi_last_bb (else_bb);
1591 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1592 integer_zero_node));
1593 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1594 bsi = gsi_last_bb (return_bb);
1595 }
1596 }
1597 else
1598 gimple_call_set_tail (call, true);
1599
1600 /* Build return value. */
1601 ret = gimple_build_return (restmp);
1602 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1603
1604 delete_unreachable_blocks ();
1605 update_ssa (TODO_update_ssa);
1606
1607 cgraph_remove_same_body_alias (node);
1608 /* Since we want to emit the thunk, we explicitly mark its name as
1609 referenced. */
28454517 1610 cgraph_add_new_function (thunk_fndecl, true);
1611 bitmap_obstack_release (NULL);
1612 }
1613 current_function_decl = NULL;
1614}
1615
ae01b312 1616/* Expand function specified by NODE. */
e6d2b2d8 1617
ae01b312 1618static void
d9d9733a 1619cgraph_expand_function (struct cgraph_node *node)
ae01b312 1620{
1621 tree decl = node->decl;
1622
b0cdf642 1623 /* We ought to not compile any inline clones. */
cc636d56 1624 gcc_assert (!node->global.inlined_to);
b0cdf642 1625
6329636b 1626 announce_function (decl);
09fc9532 1627 node->process = 0;
961e3b13 1628
e7c352d1 1629 gcc_assert (node->lowered);
f8deefc1 1630
794da2bb 1631 /* Generate RTL for the body of DECL. */
84e10000 1632 tree_rest_of_compilation (decl);
961e3b13 1633
4ee9c684 1634 /* Make sure that BE didn't give up on compiling. */
c04e3894 1635 gcc_assert (TREE_ASM_WRITTEN (decl));
ae01b312 1636 current_function_decl = NULL;
ed772161 1637 if (node->same_body)
1638 {
28454517 1639 struct cgraph_node *alias, *next;
ed772161 1640 bool saved_alias = node->alias;
28454517 1641 for (alias = node->same_body;
1642 alias && alias->next; alias = alias->next)
1643 ;
 1644 /* Walk aliases in the order they were created; it is possible that
 1645 thunks refer to the aliases made earlier. */
1646 for (; alias; alias = next)
1647 {
1648 next = alias->previous;
1649 if (!alias->thunk.thunk_p)
1650 assemble_alias (alias->decl,
1651 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1652 else
1653 assemble_thunk (alias);
1654 }
ed772161 1655 node->alias = saved_alias;
1656 }
1a1a827a 1657 gcc_assert (!cgraph_preserve_function_body_p (decl));
1658 cgraph_release_function_body (node);
1659 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1660 points to the dead function body. */
1661 cgraph_node_remove_callees (node);
e1be32b8 1662
1663 cgraph_function_flags_ready = true;
ae01b312 1664}
1665
b0cdf642 1666/* Return true when the call in edge E can be inlined; set *REASON to its inline_failed code. */
d7c6d889 1667
1668bool
326a9581 1669cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1670{
b0cdf642 1671 *reason = e->inline_failed;
1672 return !e->inline_failed;
d7c6d889 1673}
b0cdf642 1674
acc70efa 1675
acc70efa 1676
d9d9733a 1677/* Expand all functions that must be output.
 1678
d7c6d889 1679 Attempt to topologically sort the nodes so a function is output when
 1680 all called functions are already assembled, to allow data to be
91c82c20 1681 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1682 between a function and its callees (later we may choose to use a more
d7c6d889 1683 sophisticated algorithm for function reordering; we will likely want
 1684 to use subsections to make the output functions appear in top-down
 1685 order). */
1686
1687static void
a6868229 1688cgraph_expand_all_functions (void)
d7c6d889 1689{
1690 struct cgraph_node *node;
4c36ffe6 1691 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1692 int order_pos, new_order_pos = 0;
d7c6d889 1693 int i;
1694
d7c6d889 1695 order_pos = cgraph_postorder (order);
cc636d56 1696 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1697
7bd28bba 1698  /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1699     optimization, so we must be sure not to reference them.  */
1700 for (i = 0; i < order_pos; i++)
09fc9532 1701 if (order[i]->process)
b0cdf642 1702 order[new_order_pos++] = order[i];
1703
1704 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1705 {
1706 node = order[i];
09fc9532 1707 if (node->process)
d7c6d889 1708 {
cc636d56 1709 gcc_assert (node->reachable);
09fc9532 1710 node->process = 0;
d7c6d889 1711 cgraph_expand_function (node);
1712 }
1713 }
523c1122 1714 cgraph_process_new_functions ();
773c5ba7 1715
d7c6d889 1716 free (order);
773c5ba7 1717
d7c6d889 1718}
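/* Illustrative note (added; not in the original sources): for an acyclic
   callgraph in which main calls foo and foo calls bar, the loop above is
   expected to expand bar first, then foo, then main, so that each function
   is assembled only after its callees.  */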
1719
56af936e 1720/* This is used to sort the node types by the cgraph order number. */
1721
0b09525f 1722enum cgraph_order_sort_kind
1723{
1724 ORDER_UNDEFINED = 0,
1725 ORDER_FUNCTION,
1726 ORDER_VAR,
1727 ORDER_ASM
1728};
1729
56af936e 1730struct cgraph_order_sort
1731{
0b09525f 1732 enum cgraph_order_sort_kind kind;
56af936e 1733 union
1734 {
1735 struct cgraph_node *f;
1d416bd7 1736 struct varpool_node *v;
56af936e 1737 struct cgraph_asm_node *a;
1738 } u;
1739};
1740
1741/* Output all functions, variables, and asm statements in the order
1742   given by their order fields, which is the order in which they
1743 appeared in the file. This implements -fno-toplevel-reorder. In
1744 this mode we may output functions and variables which don't really
1745 need to be output. */
1746
1747static void
1748cgraph_output_in_order (void)
1749{
1750 int max;
56af936e 1751 struct cgraph_order_sort *nodes;
1752 int i;
1753 struct cgraph_node *pf;
1d416bd7 1754 struct varpool_node *pv;
56af936e 1755 struct cgraph_asm_node *pa;
1756
1757 max = cgraph_order;
3e1cde87 1758 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1759
1d416bd7 1760 varpool_analyze_pending_decls ();
56af936e 1761
1762 for (pf = cgraph_nodes; pf; pf = pf->next)
1763 {
09fc9532 1764 if (pf->process)
56af936e 1765 {
1766 i = pf->order;
1767 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1768 nodes[i].kind = ORDER_FUNCTION;
1769 nodes[i].u.f = pf;
1770 }
1771 }
1772
1d416bd7 1773 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1774 {
1775 i = pv->order;
1776 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1777 nodes[i].kind = ORDER_VAR;
1778 nodes[i].u.v = pv;
1779 }
1780
1781 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1782 {
1783 i = pa->order;
1784 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1785 nodes[i].kind = ORDER_ASM;
1786 nodes[i].u.a = pa;
1787 }
56af936e 1788
304e5318 1789  /* In no-toplevel-reorder mode we output all statics; mark them as needed.  */
1790 for (i = 0; i < max; ++i)
1791 {
1792 if (nodes[i].kind == ORDER_VAR)
1793 {
1794 varpool_mark_needed_node (nodes[i].u.v);
1795 }
1796 }
1797 varpool_empty_needed_queue ();
1798
56af936e 1799 for (i = 0; i < max; ++i)
1800 {
1801 switch (nodes[i].kind)
1802 {
1803 case ORDER_FUNCTION:
09fc9532 1804 nodes[i].u.f->process = 0;
56af936e 1805 cgraph_expand_function (nodes[i].u.f);
1806 break;
1807
1808 case ORDER_VAR:
1d416bd7 1809 varpool_assemble_decl (nodes[i].u.v);
56af936e 1810 break;
1811
1812 case ORDER_ASM:
1813 assemble_asm (nodes[i].u.a->asm_str);
1814 break;
1815
1816 case ORDER_UNDEFINED:
1817 break;
1818
1819 default:
1820 gcc_unreachable ();
1821 }
1822 }
4b4ea2db 1823
1824 cgraph_asm_nodes = NULL;
3e1cde87 1825 free (nodes);
56af936e 1826}
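/* Illustrative sketch (added; not in the original sources): under
   -fno-toplevel-reorder the entities of a unit such as the one below are
   expected to be assembled in exactly this source order, because each
   cgraph, varpool and asm node records its position in its order field.  */
#if 0
static int counter;                      /* order 0 */
asm ("# marker");                        /* order 1 */
static void tick (void) { counter++; }   /* order 2 */
#endif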
1827
b0cdf642 1828/* Return true when function body of DECL still needs to be kept around
1829 for later re-use. */
1830bool
1831cgraph_preserve_function_body_p (tree decl)
1832{
1833 struct cgraph_node *node;
8d8c4c8d 1834
1835 gcc_assert (cgraph_global_info_ready);
b0cdf642 1836 /* Look if there is any clone around. */
ccf4ab6b 1837 node = cgraph_node (decl);
1838 if (node->clones)
1839 return true;
b0cdf642 1840 return false;
1841}
1842
77fce4cd 1843static void
1844ipa_passes (void)
1845{
87d4aa85 1846 set_cfun (NULL);
4b14adf9 1847 current_function_decl = NULL;
75a70cf9 1848 gimple_register_cfg_hooks ();
77fce4cd 1849 bitmap_obstack_initialize (NULL);
59dd4830 1850
c9036234 1851 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1852
59dd4830 1853 if (!in_lto_p)
1854 execute_ipa_pass_list (all_small_ipa_passes);
9ed5b1f5 1855
7bfefa9d 1856 /* If pass_all_early_optimizations was not scheduled, the state of
1857 the cgraph will not be properly updated. Update it now. */
1858 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1859 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 1860
7bfefa9d 1861 if (!in_lto_p)
1862 {
1863 /* Generate coverage variables and constructors. */
1864 coverage_finish ();
1865
1866 /* Process new functions added. */
1867 set_cfun (NULL);
1868 current_function_decl = NULL;
1869 cgraph_process_new_functions ();
7bfefa9d 1870
c9036234 1871 execute_ipa_summary_passes
1872 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 1873 }
23433d72 1874
1875 /* Some targets need to handle LTO assembler output specially. */
1876 if (flag_generate_lto)
1877 targetm.asm_out.lto_start ();
1878
7bfefa9d 1879 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1880
1881 if (!in_lto_p)
1882 ipa_write_summaries ();
1883
23433d72 1884 if (flag_generate_lto)
1885 targetm.asm_out.lto_end ();
1886
8867b500 1887 if (!flag_ltrans)
1888 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 1889 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 1890
77fce4cd 1891 bitmap_obstack_release (NULL);
1892}
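/* Summary note (added for clarity): when not reading LTO input the sequence
   above is: the small IPA passes, coverage finalization, summary generation
   for the regular IPA passes, the LTO summary-generation passes and
   ipa_write_summaries, and finally (unless -fltrans) the regular IPA passes
   themselves.  */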
1893
34e5cced 1894
ae01b312 1895/* Perform simple optimizations based on callgraph. */
1896
7bfefa9d 1897void
d9d9733a 1898cgraph_optimize (void)
ae01b312 1899{
852f689e 1900 if (seen_error ())
cb2b5570 1901 return;
1902
b0cdf642 1903#ifdef ENABLE_CHECKING
1904 verify_cgraph ();
1905#endif
a861fe52 1906
c1dcd13c 1907 /* Frontend may output common variables after the unit has been finalized.
1908 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 1909 varpool_analyze_pending_decls ();
e9f08e82 1910
f79b6507 1911 timevar_push (TV_CGRAPHOPT);
51949610 1912 if (pre_ipa_mem_report)
1913 {
1914 fprintf (stderr, "Memory consumption before IPA\n");
1915 dump_memory_report (false);
1916 }
d7c6d889 1917 if (!quiet_flag)
cd6bca02 1918 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1919 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1920
be4d0974 1921  /* Don't run the IPA passes if there were any errors or sorry messages.  */
852f689e 1922 if (!seen_error ())
be4d0974 1923 ipa_passes ();
1924
34e5cced 1925 /* Do nothing else if any IPA pass found errors. */
852f689e 1926 if (seen_error ())
021c1c18 1927 {
1928 timevar_pop (TV_CGRAPHOPT);
1929 return;
1930 }
34e5cced 1931
e1be32b8 1932  /* This pass removes bodies of extern inline functions we never inlined.
1933 Do this later so other IPA passes see what is really going on. */
1934 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1935 cgraph_global_info_ready = true;
f79b6507 1936 if (cgraph_dump_file)
1937 {
e4200070 1938 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1939 dump_cgraph (cgraph_dump_file);
c1dcd13c 1940 dump_varpool (cgraph_dump_file);
f79b6507 1941 }
51949610 1942 if (post_ipa_mem_report)
1943 {
defa2fa6 1944 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1945 dump_memory_report (false);
1946 }
f79b6507 1947 timevar_pop (TV_CGRAPHOPT);
ae01b312 1948
d7c6d889 1949 /* Output everything. */
47306a5d 1950 (*debug_hooks->assembly_start) ();
e4200070 1951 if (!quiet_flag)
1952 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1953#ifdef ENABLE_CHECKING
1954 verify_cgraph ();
1955#endif
56af936e 1956
ccf4ab6b 1957 cgraph_materialize_all_clones ();
acc70efa 1958 cgraph_mark_functions_to_output ();
c1dcd13c 1959
523c1122 1960 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1961 if (!flag_toplevel_reorder)
1962 cgraph_output_in_order ();
1963 else
1964 {
1965 cgraph_output_pending_asms ();
1966
1967 cgraph_expand_all_functions ();
1d416bd7 1968 varpool_remove_unreferenced_decls ();
56af936e 1969
1d416bd7 1970 varpool_assemble_pending_decls ();
56af936e 1971 }
523c1122 1972 cgraph_process_new_functions ();
1973 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1974
f79b6507 1975 if (cgraph_dump_file)
1976 {
e4200070 1977 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1978 dump_cgraph (cgraph_dump_file);
1979 }
b0cdf642 1980#ifdef ENABLE_CHECKING
1981 verify_cgraph ();
4ee9c684 1982 /* Double check that all inline clones are gone and that all
1983 function bodies have been released from memory. */
852f689e 1984 if (!seen_error ())
4ee9c684 1985 {
1986 struct cgraph_node *node;
1987 bool error_found = false;
1988
1989 for (node = cgraph_nodes; node; node = node->next)
1990 if (node->analyzed
1991 && (node->global.inlined_to
1a1a827a 1992 || gimple_has_body_p (node->decl)))
4ee9c684 1993 {
1994 error_found = true;
1995 dump_cgraph_node (stderr, node);
a0c938f0 1996 }
4ee9c684 1997 if (error_found)
c04e3894 1998 internal_error ("nodes with unreleased memory found");
4ee9c684 1999 }
b0cdf642 2000#endif
ae01b312 2001}
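/* Summary note (added for clarity): the overall flow above is: analyze
   pending varpool decls, run ipa_passes, remove unreachable nodes,
   materialize all clones, mark functions to output, and then expand them
   either in source order (cgraph_output_in_order) or in the postorder-based
   order of cgraph_expand_all_functions.  */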
34e5cced 2002
2003
2c56f72e 2004/* Generate and emit a static constructor or destructor. WHICH must
2005 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
2006 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
f0b5f617 2007 initialization priority for this constructor or destructor. */
b5530559 2008
2009void
c5344b58 2010cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 2011{
2012 static int counter = 0;
2013 char which_buf[16];
540edea7 2014 tree decl, name, resdecl;
b5530559 2015
2c56f72e 2016 /* The priority is encoded in the constructor or destructor name.
2017 collect2 will sort the names and arrange that they are called at
2018 program startup. */
2019 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
db85cc4f 2020 name = get_file_function_name (which_buf);
b5530559 2021
e60a6f7b 2022 decl = build_decl (input_location, FUNCTION_DECL, name,
b5530559 2023 build_function_type (void_type_node, void_list_node));
2024 current_function_decl = decl;
2025
e60a6f7b 2026 resdecl = build_decl (input_location,
2027 RESULT_DECL, NULL_TREE, void_type_node);
540edea7 2028 DECL_ARTIFICIAL (resdecl) = 1;
540edea7 2029 DECL_RESULT (decl) = resdecl;
8e5b4ed6 2030 DECL_CONTEXT (resdecl) = decl;
540edea7 2031
80f2ef47 2032 allocate_struct_function (decl, false);
b5530559 2033
2034 TREE_STATIC (decl) = 1;
2035 TREE_USED (decl) = 1;
2036 DECL_ARTIFICIAL (decl) = 1;
b5530559 2037 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
2038 DECL_SAVED_TREE (decl) = body;
b1be8e04 2039 if (!targetm.have_ctors_dtors)
2040 {
2041 TREE_PUBLIC (decl) = 1;
2042 DECL_PRESERVE_P (decl) = 1;
2043 }
b5530559 2044 DECL_UNINLINABLE (decl) = 1;
2045
2046 DECL_INITIAL (decl) = make_node (BLOCK);
2047 TREE_USED (DECL_INITIAL (decl)) = 1;
2048
2049 DECL_SOURCE_LOCATION (decl) = input_location;
2050 cfun->function_end_locus = input_location;
2051
cc636d56 2052 switch (which)
2053 {
2054 case 'I':
2055 DECL_STATIC_CONSTRUCTOR (decl) = 1;
64c2e9b0 2056 decl_init_priority_insert (decl, priority);
cc636d56 2057 break;
2058 case 'D':
2059 DECL_STATIC_DESTRUCTOR (decl) = 1;
64c2e9b0 2060 decl_fini_priority_insert (decl, priority);
cc636d56 2061 break;
2062 default:
2063 gcc_unreachable ();
2064 }
b5530559 2065
2066 gimplify_function_tree (decl);
2067
523c1122 2068 cgraph_add_new_function (decl, false);
2069 cgraph_mark_needed_node (cgraph_node (decl));
e3a37aef 2070 set_cfun (NULL);
b5530559 2071}
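/* Usage sketch (added; not from the original sources - my_init_fndecl stands
   for a hypothetical FUNCTION_DECL built elsewhere by a front end that wants
   a file-scope constructor run at the default priority):  */
#if 0
  {
    tree body = NULL_TREE;
    append_to_statement_list (build_call_expr (my_init_fndecl, 0), &body);
    cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
  }
#endif
/* The emitted function stays static unless the target lacks ctor/dtor
   support, in which case it is made public and preserved so collect2 can
   find it by name.  */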
121f3051 2072
2073void
2074init_cgraph (void)
2075{
2076 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2077}
b5d36404 2078
a0c938f0 2079/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2080   fixed by cgraph_function_versioning (); now the call statements in their
2081   respective bodies should be updated to call the NEW_VERSION.  */
2082
2083static void
2084update_call_expr (struct cgraph_node *new_version)
2085{
2086 struct cgraph_edge *e;
2087
2088 gcc_assert (new_version);
75a70cf9 2089
2090 /* Update the call expr on the edges to call the new version. */
b5d36404 2091 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2092 {
2093 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2094 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2095 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2096 }
b5d36404 2097}
2098
2099
2100/* Create a new cgraph node which is the new version of
2101   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
2102   edges which should be redirected to point to
2103   NEW_VERSION.  All the callee edges of OLD_VERSION
2104   are cloned to the new version node.  Return the new
b06ab5fa 2105   version node.
2106
2107   If non-NULL, BBS_TO_COPY determines which basic blocks
2108   are copied, to prevent duplication of calls that are dead
2109   in the clone.  */
b5d36404 2110
2111static struct cgraph_node *
2112cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2113 tree new_decl,
b06ab5fa 2114 VEC(cgraph_edge_p,heap) *redirect_callers,
2115 bitmap bbs_to_copy)
2116 {
b5d36404 2117 struct cgraph_node *new_version;
32936803 2118 struct cgraph_edge *e;
b5d36404 2119 unsigned i;
2120
2121 gcc_assert (old_version);
a0c938f0 2122
b5d36404 2123 new_version = cgraph_node (new_decl);
2124
2125 new_version->analyzed = true;
2126 new_version->local = old_version->local;
a70a5e2c 2127 new_version->local.externally_visible = false;
2128 new_version->local.local = true;
2129 new_version->local.vtable_method = false;
b5d36404 2130 new_version->global = old_version->global;
2131   new_version->rtl = old_version->rtl;
2132 new_version->reachable = true;
2133 new_version->count = old_version->count;
2134
a70a5e2c 2135 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2136 if (!bbs_to_copy
2137 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2138 cgraph_clone_edge (e, new_version, e->call_stmt,
2139 e->lto_stmt_uid, REG_BR_PROB_BASE,
2140 CGRAPH_FREQ_BASE,
2141 e->loop_nest, true);
a70a5e2c 2142 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2143 if (!bbs_to_copy
2144 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2145 cgraph_clone_edge (e, new_version, e->call_stmt,
2146 e->lto_stmt_uid, REG_BR_PROB_BASE,
2147 CGRAPH_FREQ_BASE,
2148 e->loop_nest, true);
4460a647 2149 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2150 {
2151 /* Redirect calls to the old version node to point to its new
2152 version. */
2153 cgraph_redirect_edge_callee (e, new_version);
2154 }
b5d36404 2155
2156 return new_version;
2157 }
2158
2159 /* Perform function versioning.
a0c938f0 2160 Function versioning includes copying of the tree and
b5d36404 2161 a callgraph update (creating a new cgraph node and updating
2162 its callees and callers).
2163
2164    The REDIRECT_CALLERS vector contains the edges to be redirected
2165    to the new version.
2166
2167 TREE_MAP is a mapping of tree nodes we want to replace with
2168 new ones (according to results of prior analysis).
2169 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2170 It returns the new version's cgraph node.
b06ab5fa 2171    If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
2172    from the new version.
2173    If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2174    If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */
b5d36404 2175
2176struct cgraph_node *
2177cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2178 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2179 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2180 bitmap args_to_skip,
b06ab5fa 2181 bitmap bbs_to_copy,
2182 basic_block new_entry_block,
a70a5e2c 2183 const char *clone_name)
b5d36404 2184{
2185 tree old_decl = old_version_node->decl;
2186 struct cgraph_node *new_version_node = NULL;
2187 tree new_decl;
2188
2189 if (!tree_versionable_function_p (old_decl))
2190 return NULL;
2191
2192 /* Make a new FUNCTION_DECL tree node for the
2193 new version. */
5afe38fe 2194 if (!args_to_skip)
2195 new_decl = copy_node (old_decl);
2196 else
2197 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2198
df0b8dfb 2199 cgraph_make_decl_local (new_decl);
2200 /* Generate a new name for the new version. */
2201 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2202 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2203 SET_DECL_RTL (new_decl, NULL);
2204
b5d36404 2205 /* Create the new version's call-graph node.
2206 and update the edges of the new node. */
2207 new_version_node =
2208 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2209 redirect_callers, bbs_to_copy);
b5d36404 2210
2211 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2212 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2213 bbs_to_copy, new_entry_block);
b5d36404 2214
a0c938f0 2215 /* Update the new version's properties.
e03a95e7 2216     Make the new version visible only within this translation unit.  Make sure
2217     it is not weak either.
a0c938f0 2218 ??? We cannot use COMDAT linkage because there is no
b5d36404 2219 ABI support for this. */
6137cc9f 2220 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2221 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2222 new_version_node->local.externally_visible = 0;
2223 new_version_node->local.local = 1;
2224 new_version_node->lowered = true;
f014e39d 2225
e03a95e7 2226 /* Update the call_expr on the edges to call the new version node. */
2227 update_call_expr (new_version_node);
48e1416a 2228
50828ed8 2229 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2230 return new_version_node;
2231}
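/* Usage sketch (added; not from the original sources - old_node and e stand
   for a cgraph node and one of its caller edges obtained elsewhere): an IPA
   pass that wants an unmodified private copy reachable only through E could
   do something like the following.  */
#if 0
  {
    VEC (cgraph_edge_p, heap) *redirect = VEC_alloc (cgraph_edge_p, heap, 1);
    struct cgraph_node *new_node;

    VEC_safe_push (cgraph_edge_p, heap, redirect, e);
    new_node = cgraph_function_versioning (old_node, redirect, NULL, NULL,
					   NULL, NULL, "myclone");
    VEC_free (cgraph_edge_p, heap, redirect);
  }
#endif
/* Passing NULL for TREE_MAP, ARGS_TO_SKIP, BBS_TO_COPY and NEW_ENTRY_BLOCK
   copies the body unchanged; only the visibility and the callers change.  */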
469679ab 2232
2233/* Produce separate function body for inline clones so the offline copy can be
2234 modified without affecting them. */
2235struct cgraph_node *
2236save_inline_function_body (struct cgraph_node *node)
2237{
ccf4ab6b 2238 struct cgraph_node *first_clone, *n;
469679ab 2239
2240 gcc_assert (node == cgraph_node (node->decl));
2241
2242 cgraph_lower_function (node);
2243
ccf4ab6b 2244 first_clone = node->clones;
469679ab 2245
2246 first_clone->decl = copy_node (node->decl);
469679ab 2247 cgraph_insert_node_to_hashtable (first_clone);
2248 gcc_assert (first_clone == cgraph_node (first_clone->decl));
ccf4ab6b 2249 if (first_clone->next_sibling_clone)
2250 {
2251 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2252 n->clone_of = first_clone;
2253 n->clone_of = first_clone;
2254 n->next_sibling_clone = first_clone->clones;
2255 if (first_clone->clones)
2256 first_clone->clones->prev_sibling_clone = n;
2257 first_clone->clones = first_clone->next_sibling_clone;
2258 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2259 first_clone->next_sibling_clone = NULL;
2260 gcc_assert (!first_clone->prev_sibling_clone);
2261 }
2262 first_clone->clone_of = NULL;
2263 node->clones = NULL;
2264
2265 if (first_clone->clones)
2266 for (n = first_clone->clones; n != first_clone;)
2267 {
2268 gcc_assert (n->decl == node->decl);
2269 n->decl = first_clone->decl;
2270 if (n->clones)
2271 n = n->clones;
2272 else if (n->next_sibling_clone)
2273 n = n->next_sibling_clone;
2274 else
2275 {
2276 while (n != first_clone && !n->next_sibling_clone)
2277 n = n->clone_of;
2278 if (n != first_clone)
2279 n = n->next_sibling_clone;
2280 }
2281 }
469679ab 2282
2283 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2284 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2285 NULL, NULL);
469679ab 2286
2287 DECL_EXTERNAL (first_clone->decl) = 0;
ecd88073 2288 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
469679ab 2289 TREE_PUBLIC (first_clone->decl) = 0;
2290 DECL_COMDAT (first_clone->decl) = 0;
7fe9b425 2291 VEC_free (ipa_opt_pass, heap,
6d1cc52c 2292 first_clone->ipa_transforms_to_apply);
2293 first_clone->ipa_transforms_to_apply = NULL;
469679ab 2294
469679ab 2295#ifdef ENABLE_CHECKING
2296 verify_cgraph_node (first_clone);
2297#endif
2298 return first_clone;
2299}
a861fe52 2300
ccf4ab6b 2301/* Given a virtual clone, turn it into an actual clone.  */
2302static void
2303cgraph_materialize_clone (struct cgraph_node *node)
2304{
2305 bitmap_obstack_initialize (NULL);
e748b31d 2306#ifdef ENABLE_CHECKING
2307 node->former_clone_of = node->clone_of->decl;
2308 if (node->clone_of->former_clone_of)
2309 node->former_clone_of = node->clone_of->former_clone_of;
2310#endif
ccf4ab6b 2311  /* Copy the function body of the clone's origin into the clone itself.  */
2312 tree_function_versioning (node->clone_of->decl, node->decl,
2313 node->clone.tree_map, true,
b06ab5fa 2314 node->clone.args_to_skip, NULL, NULL);
e20422ea 2315 if (cgraph_dump_file)
2316 {
2317 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2318 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2319 }
ccf4ab6b 2320
2321  /* The function is no longer a clone.  */
2322 if (node->next_sibling_clone)
2323 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2324 if (node->prev_sibling_clone)
2325 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2326 else
2327 node->clone_of->clones = node->next_sibling_clone;
2328 node->next_sibling_clone = NULL;
2329 node->prev_sibling_clone = NULL;
6d1cc52c 2330 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2331 {
2332 cgraph_release_function_body (node->clone_of);
2333 cgraph_node_remove_callees (node->clone_of);
2334 ipa_remove_all_references (&node->clone_of->ref_list);
2335 }
ccf4ab6b 2336 node->clone_of = NULL;
2337 bitmap_obstack_release (NULL);
2338}
2339
c596d830 2340/* If necessary, change the function declaration in the call statement
2341 associated with E so that it corresponds to the edge callee. */
2342
2343gimple
2344cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2345{
2346 tree decl = gimple_call_fndecl (e->call_stmt);
2347 gimple new_stmt;
2348 gimple_stmt_iterator gsi;
1f449108 2349#ifdef ENABLE_CHECKING
2350 struct cgraph_node *node;
2351#endif
c596d830 2352
2353 if (!decl || decl == e->callee->decl
2354 /* Don't update call from same body alias to the real function. */
2355 || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
2356 return e->call_stmt;
2357
1f449108 2358#ifdef ENABLE_CHECKING
2359 node = cgraph_get_node (decl);
2360 gcc_assert (!node || !node->clone.combined_args_to_skip);
2361#endif
e748b31d 2362
c596d830 2363 if (cgraph_dump_file)
2364 {
2365 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2366 cgraph_node_name (e->caller), e->caller->uid,
2367 cgraph_node_name (e->callee), e->callee->uid);
2368 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2369 if (e->callee->clone.combined_args_to_skip)
2370 {
2371 fprintf (cgraph_dump_file, " combined args to skip: ");
2372 dump_bitmap (cgraph_dump_file, e->callee->clone.combined_args_to_skip);
2373 }
c596d830 2374 }
2375
2376 if (e->callee->clone.combined_args_to_skip)
2377 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
2378 e->callee->clone.combined_args_to_skip);
2379 else
2380 new_stmt = e->call_stmt;
2381 if (gimple_vdef (new_stmt)
2382 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2383 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2384 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2385
2386 gsi = gsi_for_stmt (e->call_stmt);
2387 gsi_replace (&gsi, new_stmt, true);
e32916b6 2388 update_stmt (new_stmt);
c596d830 2389
2390 /* Update EH information too, just in case. */
2391 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
2392
2393 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2394
2395 if (cgraph_dump_file)
2396 {
2397 fprintf (cgraph_dump_file, " updated to:");
2398 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2399 }
2400 return new_stmt;
2401}
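/* Illustrative note (added; not in the original sources): if the callee of E
   is a clone whose combined_args_to_skip bitmap has bit 1 set, a call such as
   foo (a, b, c) is expected to be rewritten above into a call to the clone
   with arguments (a, c), with the virtual operands and EH information of the
   statement updated accordingly.  */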
2402
ccf4ab6b 2403/* Once all functions from the compilation unit are in memory, produce all
c596d830 2404   clones and update all calls.  We might also do this on demand if we don't
2405   want to bring all functions to memory prior to compilation, but the current
2406   WHOPR implementation does that and it is a bit easier to keep everything right in
2407 this order. */
ccf4ab6b 2408void
2409cgraph_materialize_all_clones (void)
2410{
2411 struct cgraph_node *node;
2412 bool stabilized = false;
2413
2414 if (cgraph_dump_file)
2415 fprintf (cgraph_dump_file, "Materializing clones\n");
2416#ifdef ENABLE_CHECKING
2417 verify_cgraph ();
2418#endif
2419
2420 /* We can also do topological order, but number of iterations should be
2421 bounded by number of IPA passes since single IPA pass is probably not
2422 going to create clones of clones it created itself. */
2423 while (!stabilized)
2424 {
2425 stabilized = true;
2426 for (node = cgraph_nodes; node; node = node->next)
2427 {
2428 if (node->clone_of && node->decl != node->clone_of->decl
2429 && !gimple_has_body_p (node->decl))
2430 {
2431 if (gimple_has_body_p (node->clone_of->decl))
2432 {
2433 if (cgraph_dump_file)
e20422ea 2434 {
2435 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2436 cgraph_node_name (node->clone_of),
2437 cgraph_node_name (node));
2438 if (node->clone.tree_map)
2439 {
2440 unsigned int i;
2441 fprintf (cgraph_dump_file, " replace map: ");
2442 for (i = 0; i < VEC_length (ipa_replace_map_p,
2443 node->clone.tree_map);
2444 i++)
2445 {
2446 struct ipa_replace_map *replace_info;
2447 replace_info = VEC_index (ipa_replace_map_p,
2448 node->clone.tree_map,
2449 i);
2450 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2451 fprintf (cgraph_dump_file, " -> ");
2452 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2453 fprintf (cgraph_dump_file, "%s%s;",
2454 replace_info->replace_p ? "(replace)":"",
2455 replace_info->ref_p ? "(ref)":"");
2456 }
2457 fprintf (cgraph_dump_file, "\n");
2458 }
2459 if (node->clone.args_to_skip)
2460 {
2461 fprintf (cgraph_dump_file, " args_to_skip: ");
2462 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2463 }
2464 		      if (node->clone.combined_args_to_skip)
2465 {
2466 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2467 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2468 }
2469 }
ccf4ab6b 2470 cgraph_materialize_clone (node);
a510bd8d 2471 stabilized = false;
ccf4ab6b 2472 }
ccf4ab6b 2473 }
2474 }
2475 }
ee3f5fc0 2476 for (node = cgraph_nodes; node; node = node->next)
2477 if (!node->analyzed && node->callees)
2478 cgraph_node_remove_callees (node);
c596d830 2479 if (cgraph_dump_file)
2480 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2481#ifdef ENABLE_CHECKING
2482 verify_cgraph ();
2483#endif
ccf4ab6b 2484 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2485}
2486
a861fe52 2487#include "gt-cgraphunit.h"