]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
* c-pragma.c (pending_weak_d, pending_weak): New.
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
cd6bca02 1/* Callgraph based interprocedural optimizations.
028a99ef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
6329636b 48 In the call-graph construction and local function
b0cdf642 49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
 75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
6329636b 80 Analyzing of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
 105 really have been referenced by produced code, so we catch more cases
6329636b 106 where reference has been optimized out. */
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
f79b6507 126#include "timevar.h"
d7c6d889 127#include "params.h"
128#include "fibheap.h"
611e5405 129#include "intl.h"
b69eb0ff 130#include "function.h"
b5d36404 131#include "ipa-prop.h"
75a70cf9 132#include "gimple.h"
133#include "tree-iterator.h"
f1e2a033 134#include "tree-pass.h"
bfec3452 135#include "tree-dump.h"
c1dcd13c 136#include "output.h"
9ed5b1f5 137#include "coverage.h"
c9036234 138#include "plugin.h"
d7c6d889 139
a6868229 140static void cgraph_expand_all_functions (void);
d9d9733a 141static void cgraph_mark_functions_to_output (void);
142static void cgraph_expand_function (struct cgraph_node *);
f788fff2 143static void cgraph_output_pending_asms (void);
bfec3452 144static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 145
121f3051 146static FILE *cgraph_dump_file;
147
2c56f72e 148/* A vector of FUNCTION_DECLs declared as static constructors. */
149static GTY (()) VEC(tree, gc) *static_ctors;
150/* A vector of FUNCTION_DECLs declared as static destructors. */
151static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 152
28454517 153/* Used for vtable lookup in thunk adjusting. */
154static GTY (()) tree vtable_entry_type;
155
a861fe52 156/* When target does not have ctors and dtors, we call all constructor
310d2511 157 and destructor by special initialization/destruction function
48e1416a 158 recognized by collect2.
159
a861fe52 160 When we are going to build this function, collect all constructors and
161 destructors and turn them into normal functions. */
162
163static void
164record_cdtor_fn (tree fndecl)
165{
2de29097 166 struct cgraph_node *node;
167 if (targetm.have_ctors_dtors
168 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 170 return;
171
172 if (DECL_STATIC_CONSTRUCTOR (fndecl))
173 {
2c56f72e 174 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 175 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 176 }
177 if (DECL_STATIC_DESTRUCTOR (fndecl))
178 {
2c56f72e 179 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 180 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 181 }
2de29097 182 node = cgraph_node (fndecl);
183 node->local.disregard_inline_limits = 1;
184 cgraph_mark_reachable_node (node);
a861fe52 185}
186
2c56f72e 187/* Define global constructors/destructor functions for the CDTORS, of
188 which they are LEN. The CDTORS are sorted by initialization
189 priority. If CTOR_P is true, these are constructors; otherwise,
190 they are destructors. */
191
a861fe52 192static void
2c56f72e 193build_cdtor (bool ctor_p, tree *cdtors, size_t len)
a861fe52 194{
2c56f72e 195 size_t i;
a861fe52 196
2c56f72e 197 i = 0;
198 while (i < len)
199 {
200 tree body;
201 tree fn;
202 priority_type priority;
203
204 priority = 0;
205 body = NULL_TREE;
206 /* Find the next batch of constructors/destructors with the same
207 initialization priority. */
208 do
209 {
210 priority_type p;
211 fn = cdtors[i];
212 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
213 if (!body)
214 priority = p;
215 else if (p != priority)
216 break;
389dd41b 217 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
218 fn, 0),
2c56f72e 219 &body);
220 ++i;
221 }
222 while (i < len);
223 gcc_assert (body != NULL_TREE);
224 /* Generate a function to call all the function of like
225 priority. */
226 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
227 }
228}
229
230/* Comparison function for qsort. P1 and P2 are actually of type
231 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
232 used to determine the sort order. */
a861fe52 233
2c56f72e 234static int
235compare_ctor (const void *p1, const void *p2)
236{
237 tree f1;
238 tree f2;
239 int priority1;
240 int priority2;
241
242 f1 = *(const tree *)p1;
243 f2 = *(const tree *)p2;
244 priority1 = DECL_INIT_PRIORITY (f1);
245 priority2 = DECL_INIT_PRIORITY (f2);
48e1416a 246
2c56f72e 247 if (priority1 < priority2)
248 return -1;
249 else if (priority1 > priority2)
250 return 1;
251 else
252 /* Ensure a stable sort. */
253 return (const tree *)p1 - (const tree *)p2;
254}
255
256/* Comparison function for qsort. P1 and P2 are actually of type
257 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
258 used to determine the sort order. */
a861fe52 259
2c56f72e 260static int
261compare_dtor (const void *p1, const void *p2)
262{
263 tree f1;
264 tree f2;
265 int priority1;
266 int priority2;
267
268 f1 = *(const tree *)p1;
269 f2 = *(const tree *)p2;
270 priority1 = DECL_FINI_PRIORITY (f1);
271 priority2 = DECL_FINI_PRIORITY (f2);
48e1416a 272
2c56f72e 273 if (priority1 < priority2)
274 return -1;
275 else if (priority1 > priority2)
276 return 1;
277 else
278 /* Ensure a stable sort. */
279 return (const tree *)p1 - (const tree *)p2;
a861fe52 280}
281
282/* Generate functions to call static constructors and destructors
283 for targets that do not support .ctors/.dtors sections. These
284 functions have magic names which are detected by collect2. */
285
286static void
287cgraph_build_cdtor_fns (void)
288{
2c56f72e 289 if (!VEC_empty (tree, static_ctors))
a861fe52 290 {
2c56f72e 291 gcc_assert (!targetm.have_ctors_dtors);
292 qsort (VEC_address (tree, static_ctors),
48e1416a 293 VEC_length (tree, static_ctors),
2c56f72e 294 sizeof (tree),
295 compare_ctor);
296 build_cdtor (/*ctor_p=*/true,
297 VEC_address (tree, static_ctors),
48e1416a 298 VEC_length (tree, static_ctors));
2c56f72e 299 VEC_truncate (tree, static_ctors, 0);
a861fe52 300 }
2c56f72e 301
302 if (!VEC_empty (tree, static_dtors))
a861fe52 303 {
2c56f72e 304 gcc_assert (!targetm.have_ctors_dtors);
305 qsort (VEC_address (tree, static_dtors),
48e1416a 306 VEC_length (tree, static_dtors),
2c56f72e 307 sizeof (tree),
308 compare_dtor);
309 build_cdtor (/*ctor_p=*/false,
310 VEC_address (tree, static_dtors),
48e1416a 311 VEC_length (tree, static_dtors));
2c56f72e 312 VEC_truncate (tree, static_dtors, 0);
a861fe52 313 }
314}
315
2c0b522d 316/* Determine if function DECL is needed. That is, visible to something
317 either outside this translation unit, something magic in the system
6329636b 318 configury. */
2c0b522d 319
7bfefa9d 320bool
321cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 322{
3f82b628 323 /* If the user told us it is used, then it must be so. */
05806473 324 if (node->local.externally_visible)
325 return true;
326
3f82b628 327 /* ??? If the assembler name is set by hand, it is possible to assemble
328 the name later after finalizing the function and the fact is noticed
329 in assemble_name then. This is arguably a bug. */
330 if (DECL_ASSEMBLER_NAME_SET_P (decl)
331 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
332 return true;
333
55680bef 334 /* With -fkeep-inline-functions we are keeping all inline functions except
335 for extern inline ones. */
336 if (flag_keep_inline_functions
337 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 338 && !DECL_EXTERNAL (decl)
339 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 340 return true;
341
2c0b522d 342 /* If we decided it was needed before, but at the time we didn't have
343 the body of the function available, then it's still needed. We have
344 to go back and re-check its dependencies now. */
345 if (node->needed)
346 return true;
347
348 /* Externally visible functions must be output. The exception is
a0c938f0 349 COMDAT functions that must be output only when they are needed.
8baa9d15 350
351 When not optimizing, also output the static functions. (see
95da6220 352 PR24561), but don't do so for always_inline functions, functions
d3d410e1 353 declared inline and nested functions. These was optimized out
354 in the original implementation and it is unclear whether we want
554f2707 355 to change the behavior here. */
bba7ddf8 356 if (((TREE_PUBLIC (decl)
d3d410e1 357 || (!optimize && !node->local.disregard_inline_limits
358 && !DECL_DECLARED_INLINE_P (decl)
359 && !node->origin))
59dd4830 360 && !flag_whole_program
361 && !flag_lto
362 && !flag_whopr)
62eec3b4 363 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 364 return true;
365
366 /* Constructors and destructors are reachable from the runtime by
367 some mechanism. */
368 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
369 return true;
370
2c0b522d 371 return false;
372}
373
bdc40eb8 374/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
523c1122 375 functions into callgraph in a way so they look like ordinary reachable
376 functions inserted into callgraph already at construction time. */
377
378bool
379cgraph_process_new_functions (void)
380{
381 bool output = false;
382 tree fndecl;
383 struct cgraph_node *node;
384
0cddb138 385 varpool_analyze_pending_decls ();
523c1122 386 /* Note that this queue may grow as its being processed, as the new
387 functions may generate new ones. */
388 while (cgraph_new_nodes)
389 {
390 node = cgraph_new_nodes;
391 fndecl = node->decl;
392 cgraph_new_nodes = cgraph_new_nodes->next_needed;
393 switch (cgraph_state)
394 {
395 case CGRAPH_STATE_CONSTRUCTION:
396 /* At construction time we just need to finalize function and move
397 it into reachable functions list. */
398
399 node->next_needed = NULL;
400 cgraph_finalize_function (fndecl, false);
401 cgraph_mark_reachable_node (node);
402 output = true;
403 break;
404
405 case CGRAPH_STATE_IPA:
f517b36e 406 case CGRAPH_STATE_IPA_SSA:
523c1122 407 /* When IPA optimization already started, do all essential
408 transformations that has been already performed on the whole
409 cgraph but not on this function. */
410
75a70cf9 411 gimple_register_cfg_hooks ();
523c1122 412 if (!node->analyzed)
413 cgraph_analyze_function (node);
414 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
415 current_function_decl = fndecl;
9c1bff7a 416 compute_inline_parameters (node);
f517b36e 417 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
418 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
419 /* When not optimizing, be sure we run early local passes anyway
420 to expand OMP. */
421 || !optimize)
20099e35 422 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 423 free_dominance_info (CDI_POST_DOMINATORS);
424 free_dominance_info (CDI_DOMINATORS);
425 pop_cfun ();
426 current_function_decl = NULL;
427 break;
428
429 case CGRAPH_STATE_EXPANSION:
430 /* Functions created during expansion shall be compiled
431 directly. */
09fc9532 432 node->process = 0;
523c1122 433 cgraph_expand_function (node);
434 break;
435
436 default:
437 gcc_unreachable ();
438 break;
439 }
50828ed8 440 cgraph_call_function_insertion_hooks (node);
0cddb138 441 varpool_analyze_pending_decls ();
523c1122 442 }
443 return output;
444}
445
9b8fb23a 446/* As an GCC extension we allow redefinition of the function. The
447 semantics when both copies of bodies differ is not well defined.
448 We replace the old body with new body so in unit at a time mode
449 we always use new body, while in normal mode we may end up with
450 old body inlined into some functions and new body expanded and
451 inlined in others.
452
453 ??? It may make more sense to use one body for inlining and other
454 body for expanding the function but this is difficult to do. */
455
456static void
457cgraph_reset_node (struct cgraph_node *node)
458{
09fc9532 459 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 460 This is *not* testing for whether we've already emitted the function.
461 That case can be sort-of legitimately seen with real function redefinition
462 errors. I would argue that the front end should never present us with
463 such a case, but don't enforce that for now. */
09fc9532 464 gcc_assert (!node->process);
9b8fb23a 465
466 /* Reset our data structures so we can analyze the function again. */
467 memset (&node->local, 0, sizeof (node->local));
468 memset (&node->global, 0, sizeof (node->global));
469 memset (&node->rtl, 0, sizeof (node->rtl));
470 node->analyzed = false;
471 node->local.redefined_extern_inline = true;
472 node->local.finalized = false;
473
9b8fb23a 474 cgraph_node_remove_callees (node);
475
476 /* We may need to re-queue the node for assembling in case
46beef9a 477 we already proceeded it and ignored as not needed or got
478 a re-declaration in IMA mode. */
479 if (node->reachable)
9b8fb23a 480 {
481 struct cgraph_node *n;
482
483 for (n = cgraph_nodes_queue; n; n = n->next_needed)
484 if (n == node)
485 break;
486 if (!n)
487 node->reachable = 0;
488 }
489}
c08871a9 490
1e8e9920 491static void
492cgraph_lower_function (struct cgraph_node *node)
493{
494 if (node->lowered)
495 return;
bfec3452 496
497 if (node->nested)
498 lower_nested_functions (node->decl);
499 gcc_assert (!node->nested);
500
1e8e9920 501 tree_lowering_passes (node->decl);
502 node->lowered = true;
503}
504
28df663b 505/* DECL has been parsed. Take it, queue it, compile it at the whim of the
506 logic in effect. If NESTED is true, then our caller cannot stand to have
507 the garbage collector run at the moment. We would need to either create
508 a new GC context, or just not compile right now. */
ae01b312 509
510void
28df663b 511cgraph_finalize_function (tree decl, bool nested)
ae01b312 512{
513 struct cgraph_node *node = cgraph_node (decl);
514
c08871a9 515 if (node->local.finalized)
9b8fb23a 516 cgraph_reset_node (node);
28df663b 517
167b550b 518 node->pid = cgraph_max_pid ++;
c08871a9 519 notice_global_symbol (decl);
79bb87b4 520 node->local.finalized = true;
e27482aa 521 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 522 node->finalized_by_frontend = true;
a861fe52 523 record_cdtor_fn (node->decl);
ae01b312 524
7bfefa9d 525 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 526 cgraph_mark_needed_node (node);
527
ecda6e51 528 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 529 level unit, we need to be conservative about possible entry points
530 there. */
62eec3b4 531 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 532 cgraph_mark_reachable_node (node);
533
2c0b522d 534 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 535 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 536 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 537
b69eb0ff 538 /* Possibly warn about unused parameters. */
539 if (warn_unused_parameter)
540 do_warn_unused_parameter (decl);
6329636b 541
542 if (!nested)
543 ggc_collect ();
ae01b312 544}
545
0da03d11 546/* C99 extern inline keywords allow changing of declaration after function
547 has been finalized. We need to re-decide if we want to mark the function as
548 needed then. */
549
550void
551cgraph_mark_if_needed (tree decl)
552{
553 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 554 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 555 cgraph_mark_needed_node (node);
556}
557
ccf4ab6b 558/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
559static bool
560clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
561{
562 while (node != node2 && node2)
563 node2 = node2->clone_of;
564 return node2 != NULL;
565}
566
b0cdf642 567/* Verify cgraph nodes of given cgraph node. */
568void
569verify_cgraph_node (struct cgraph_node *node)
570{
571 struct cgraph_edge *e;
e27482aa 572 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
117ef3d7 573 struct function *saved_cfun = cfun;
e27482aa 574 basic_block this_block;
75a70cf9 575 gimple_stmt_iterator gsi;
9bfec7c2 576 bool error_found = false;
b0cdf642 577
bd09cd3e 578 if (errorcount || sorrycount)
579 return;
580
b0cdf642 581 timevar_push (TV_CGRAPH_VERIFY);
117ef3d7 582 /* debug_generic_stmt needs correct cfun */
583 set_cfun (this_cfun);
b0cdf642 584 for (e = node->callees; e; e = e->next_callee)
585 if (e->aux)
586 {
0a81f5a0 587 error ("aux field set for edge %s->%s",
abd3e6b5 588 identifier_to_locale (cgraph_node_name (e->caller)),
589 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 590 error_found = true;
591 }
a2cb9b3b 592 if (node->count < 0)
593 {
594 error ("Execution count is negative");
595 error_found = true;
596 }
59dd4830 597 if (node->global.inlined_to && node->local.externally_visible)
598 {
599 error ("Externally visible inline clone");
600 error_found = true;
601 }
602 if (node->global.inlined_to && node->address_taken)
603 {
604 error ("Inline clone with address taken");
605 error_found = true;
606 }
607 if (node->global.inlined_to && node->needed)
608 {
609 error ("Inline clone is needed");
610 error_found = true;
611 }
799c8711 612 for (e = node->indirect_calls; e; e = e->next_callee)
613 {
614 if (e->aux)
615 {
616 error ("aux field set for indirect edge from %s",
617 identifier_to_locale (cgraph_node_name (e->caller)));
618 error_found = true;
619 }
620 if (!e->indirect_unknown_callee
621 || !e->indirect_info)
622 {
623 error ("An indirect edge from %s is not marked as indirect or has "
624 "associated indirect_info, the corresponding statement is: ",
625 identifier_to_locale (cgraph_node_name (e->caller)));
626 debug_gimple_stmt (e->call_stmt);
627 error_found = true;
628 }
629 }
b0cdf642 630 for (e = node->callers; e; e = e->next_caller)
631 {
a2cb9b3b 632 if (e->count < 0)
633 {
634 error ("caller edge count is negative");
635 error_found = true;
636 }
4ae20857 637 if (e->frequency < 0)
638 {
639 error ("caller edge frequency is negative");
640 error_found = true;
641 }
642 if (e->frequency > CGRAPH_FREQ_MAX)
643 {
644 error ("caller edge frequency is too large");
645 error_found = true;
646 }
1c094d2f 647 if (gimple_has_body_p (e->caller->decl)
648 && !e->caller->global.inlined_to
649 && (e->frequency
650 != compute_call_stmt_bb_frequency (e->caller->decl,
651 gimple_bb (e->call_stmt))))
652 {
653 error ("caller edge frequency %i does not match BB freqency %i",
654 e->frequency,
655 compute_call_stmt_bb_frequency (e->caller->decl,
656 gimple_bb (e->call_stmt)));
657 error_found = true;
658 }
b0cdf642 659 if (!e->inline_failed)
660 {
661 if (node->global.inlined_to
662 != (e->caller->global.inlined_to
663 ? e->caller->global.inlined_to : e->caller))
664 {
0a81f5a0 665 error ("inlined_to pointer is wrong");
b0cdf642 666 error_found = true;
667 }
668 if (node->callers->next_caller)
669 {
0a81f5a0 670 error ("multiple inline callers");
b0cdf642 671 error_found = true;
672 }
673 }
674 else
675 if (node->global.inlined_to)
676 {
0a81f5a0 677 error ("inlined_to pointer set for noninline callers");
b0cdf642 678 error_found = true;
679 }
680 }
681 if (!node->callers && node->global.inlined_to)
682 {
5cd75817 683 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 684 error_found = true;
685 }
686 if (node->global.inlined_to == node)
687 {
0a81f5a0 688 error ("inlined_to pointer refers to itself");
b0cdf642 689 error_found = true;
690 }
691
0f6439b9 692 if (!cgraph_node (node->decl))
b0cdf642 693 {
0f6439b9 694 error ("node not found in cgraph_hash");
b0cdf642 695 error_found = true;
696 }
a0c938f0 697
ccf4ab6b 698 if (node->clone_of)
699 {
700 struct cgraph_node *n;
701 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
702 if (n == node)
703 break;
704 if (!n)
705 {
706 error ("node has wrong clone_of");
707 error_found = true;
708 }
709 }
710 if (node->clones)
711 {
712 struct cgraph_node *n;
713 for (n = node->clones; n; n = n->next_sibling_clone)
714 if (n->clone_of != node)
715 break;
716 if (n)
717 {
718 error ("node has wrong clone list");
719 error_found = true;
720 }
721 }
722 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
723 {
724 error ("node is in clone list but it is not clone");
725 error_found = true;
726 }
727 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
728 {
729 error ("node has wrong prev_clone pointer");
730 error_found = true;
731 }
732 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
733 {
734 error ("double linked list of clones corrupted");
735 error_found = true;
736 }
c524ac5d 737 if (node->same_comdat_group)
738 {
739 struct cgraph_node *n = node->same_comdat_group;
740
741 if (!DECL_ONE_ONLY (node->decl))
742 {
743 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
744 error_found = true;
745 }
746 if (n == node)
747 {
748 error ("node is alone in a comdat group");
749 error_found = true;
750 }
751 do
752 {
753 if (!n->same_comdat_group)
754 {
755 error ("same_comdat_group is not a circular list");
756 error_found = true;
757 break;
758 }
759 n = n->same_comdat_group;
760 }
761 while (n != node);
762 }
ccf4ab6b 763
764 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 765 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 766 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
767 && !flag_wpa)
b0cdf642 768 {
e27482aa 769 if (this_cfun->cfg)
770 {
771 /* The nodes we're interested in are never shared, so walk
772 the tree ignoring duplicates. */
e7c352d1 773 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 774 /* Reach the trees by walking over the CFG, and note the
775 enclosing basic-blocks in the call edges. */
776 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 777 for (gsi = gsi_start_bb (this_block);
778 !gsi_end_p (gsi);
779 gsi_next (&gsi))
9bfec7c2 780 {
75a70cf9 781 gimple stmt = gsi_stmt (gsi);
799c8711 782 if (is_gimple_call (stmt))
9bfec7c2 783 {
784 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 785 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 786 if (e)
787 {
788 if (e->aux)
789 {
0a81f5a0 790 error ("shared call_stmt:");
75a70cf9 791 debug_gimple_stmt (stmt);
9bfec7c2 792 error_found = true;
793 }
799c8711 794 if (!e->indirect_unknown_callee)
28454517 795 {
799c8711 796 if (e->callee->same_body_alias)
797 {
798 error ("edge points to same body alias:");
799 debug_tree (e->callee->decl);
800 error_found = true;
801 }
802 else if (!node->global.inlined_to
803 && !e->callee->global.inlined_to
804 && decl
805 && !clone_of_p (cgraph_node (decl),
806 e->callee))
807 {
808 error ("edge points to wrong declaration:");
809 debug_tree (e->callee->decl);
810 fprintf (stderr," Instead of:");
811 debug_tree (decl);
812 error_found = true;
813 }
28454517 814 }
799c8711 815 else if (decl)
9bfec7c2 816 {
799c8711 817 error ("an indirect edge with unknown callee "
818 "corresponding to a call_stmt with "
819 "a known declaration:");
ee3f5fc0 820 error_found = true;
799c8711 821 debug_gimple_stmt (e->call_stmt);
9bfec7c2 822 }
823 e->aux = (void *)1;
824 }
799c8711 825 else if (decl)
9bfec7c2 826 {
0a81f5a0 827 error ("missing callgraph edge for call stmt:");
75a70cf9 828 debug_gimple_stmt (stmt);
9bfec7c2 829 error_found = true;
830 }
831 }
832 }
e27482aa 833 pointer_set_destroy (visited_nodes);
e27482aa 834 }
835 else
836 /* No CFG available?! */
837 gcc_unreachable ();
838
b0cdf642 839 for (e = node->callees; e; e = e->next_callee)
840 {
799c8711 841 if (!e->aux)
b0cdf642 842 {
0a81f5a0 843 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 844 identifier_to_locale (cgraph_node_name (e->caller)),
845 identifier_to_locale (cgraph_node_name (e->callee)));
75a70cf9 846 debug_gimple_stmt (e->call_stmt);
b0cdf642 847 error_found = true;
848 }
849 e->aux = 0;
850 }
799c8711 851 for (e = node->indirect_calls; e; e = e->next_callee)
852 {
853 if (!e->aux)
854 {
855 error ("an indirect edge from %s has no corresponding call_stmt",
856 identifier_to_locale (cgraph_node_name (e->caller)));
857 debug_gimple_stmt (e->call_stmt);
858 error_found = true;
859 }
860 e->aux = 0;
861 }
b0cdf642 862 }
863 if (error_found)
864 {
865 dump_cgraph_node (stderr, node);
0a81f5a0 866 internal_error ("verify_cgraph_node failed");
b0cdf642 867 }
117ef3d7 868 set_cfun (saved_cfun);
b0cdf642 869 timevar_pop (TV_CGRAPH_VERIFY);
870}
871
872/* Verify whole cgraph structure. */
873void
874verify_cgraph (void)
875{
876 struct cgraph_node *node;
877
8ec2a798 878 if (sorrycount || errorcount)
879 return;
880
b0cdf642 881 for (node = cgraph_nodes; node; node = node->next)
882 verify_cgraph_node (node);
883}
884
56af936e 885/* Output all asm statements we have stored up to be output. */
886
887static void
888cgraph_output_pending_asms (void)
889{
890 struct cgraph_asm_node *can;
891
892 if (errorcount || sorrycount)
893 return;
894
895 for (can = cgraph_asm_nodes; can; can = can->next)
896 assemble_asm (can->asm_str);
897 cgraph_asm_nodes = NULL;
898}
899
0785e435 900/* Analyze the function scheduled to be output. */
bfec3452 901static void
0785e435 902cgraph_analyze_function (struct cgraph_node *node)
903{
bfec3452 904 tree save = current_function_decl;
0785e435 905 tree decl = node->decl;
906
ec1e35b2 907 current_function_decl = decl;
e27482aa 908 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 909
6816d0c4 910 assign_assembler_name_if_neeeded (node->decl);
911
bfec3452 912 /* Make sure to gimplify bodies only once. During analyzing a
913 function we lower it, which will require gimplified nested
914 functions, so we can end up here with an already gimplified
915 body. */
916 if (!gimple_body (decl))
917 gimplify_function_tree (decl);
918 dump_function (TDI_generic, decl);
919
e27482aa 920 cgraph_lower_function (node);
6e8d6e86 921 node->analyzed = true;
0785e435 922
e27482aa 923 pop_cfun ();
bfec3452 924 current_function_decl = save;
0785e435 925}
926
05806473 927/* Look for externally_visible and used attributes and mark cgraph nodes
928 accordingly.
929
930 We cannot mark the nodes at the point the attributes are processed (in
931 handle_*_attribute) because the copy of the declarations available at that
932 point may not be canonical. For example, in:
933
934 void f();
935 void f() __attribute__((used));
936
937 the declaration we see in handle_used_attribute will be the second
938 declaration -- but the front end will subsequently merge that declaration
939 with the original declaration and discard the second declaration.
940
941 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
942
943 void f() {}
944 void f() __attribute__((externally_visible));
945
946 is valid.
947
948 So, we walk the nodes at the end of the translation unit, applying the
949 attributes at that point. */
950
951static void
952process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 953 struct varpool_node *first_var)
05806473 954{
955 struct cgraph_node *node;
1d416bd7 956 struct varpool_node *vnode;
05806473 957
958 for (node = cgraph_nodes; node != first; node = node->next)
959 {
960 tree decl = node->decl;
83a23b05 961 if (DECL_PRESERVE_P (decl))
05806473 962 {
963 mark_decl_referenced (decl);
964 if (node->local.finalized)
965 cgraph_mark_needed_node (node);
966 }
967 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
968 {
ba12ea31 969 if (! TREE_PUBLIC (node->decl))
712d2297 970 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
971 "%<externally_visible%>"
972 " attribute have effect only on public objects");
59dd4830 973 else if (node->local.finalized)
974 cgraph_mark_needed_node (node);
05806473 975 }
976 }
1d416bd7 977 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 978 {
979 tree decl = vnode->decl;
83a23b05 980 if (DECL_PRESERVE_P (decl))
05806473 981 {
982 mark_decl_referenced (decl);
22671757 983 vnode->force_output = true;
05806473 984 if (vnode->finalized)
1d416bd7 985 varpool_mark_needed_node (vnode);
05806473 986 }
987 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
988 {
ba12ea31 989 if (! TREE_PUBLIC (vnode->decl))
712d2297 990 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
991 "%<externally_visible%>"
992 " attribute have effect only on public objects");
59dd4830 993 else if (vnode->finalized)
994 varpool_mark_needed_node (vnode);
05806473 995 }
996 }
997}
998
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  /* Apply "used"/"externally_visible" attributes to the nodes added
     since the last run, then pick up any pending variables.  */
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the worklist.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Analyzing the body may have created call edges; enqueue every
	 not-yet-reachable callee.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* A COMDAT group is emitted as a unit: if one member is reachable,
	 all of them are.  */
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body. The DECL_INITIAL() of that
	 abstract function declaration will be later needed to output debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Drop functions that were never reached; NEXT is cached because
     cgraph_remove_node unlinks NODE from the list.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  /* Remember where this run ended so the next call only scans new nodes.  */
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
1127
8f69fd82 1128
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by front ends after all toplevel entities have
   been finalized; drives analysis and then the whole optimization
   pipeline via cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  Done a second
     time because analysis above may have introduced new aliases
     — TODO confirm which front ends rely on this second pass.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
9ed5b1f5 1170
1171
/* Figure out what functions we want to assemble: set NODE->process on
   every node whose body must be emitted (and on its whole COMDAT
   group), and verify that everything else was already reclaimed.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be pre-marked; process flags are set only here.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* A node already marked must have been swept in as part of a
	 COMDAT group of an earlier node.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* E ends up non-NULL iff some caller did NOT fully inline this
	 function, i.e. an out-of-line copy is required.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (node->needed || node->reachable_from_other_partition
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emit the whole COMDAT group together.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Defer the reclaim check: another group member processed
	     later in this loop may still mark this node.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Second pass: any COMDAT member still unmarked must not own a body
     that should have been reclaimed.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
1268
/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  /* Build cfun for DECL; the ordering below (cfg hooks, empty CFG,
     SSA data, operands) mirrors what the lowering passes would do.  */
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Non-NULL SAVED_TREE marks the body as "present"; the real body is
     the (currently empty) CFG.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties the pass manager would otherwise establish.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1301
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Statements implementing the adjustment are emitted after *BSI, which
   is advanced past them.  Returns a fresh temporary VAR_DECL holding
   the adjusted pointer.  For a this-adjusting thunk the fixed offset is
   applied before the virtual lookup; otherwise after it.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* this-adjusting: apply the constant displacement first.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the shared pointer-to-vtable-entry type.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build1 (INDIRECT_REF,
					  TREE_TYPE (vtabletmp2), vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build1 (INDIRECT_REF,
					  TREE_TYPE (vtabletmp3),
					  vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
	  /* Force PTR into a variable first so the final fold below
	     operates on a gimple value.  */
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1431
/* Produce assembler for thunk NODE.

   Two strategies: if the target can emit a this-adjusting thunk
   directly (asm_out.can_output_mi_thunk), emit assembly via the target
   hook; otherwise synthesize a GIMPLE body that adjusts the pointer(s)
   with thunk_adjust and tail-calls the real function, then hand it to
   the callgraph as a new function.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      /* Fast path: the target emits the thunk as raw assembly.  */
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Generic path: build a GIMPLE body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate return: reuse the RESULT_DECL directly.  */
	      restmp = resdecl;
	      cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = TREE_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* First argument is `this'; adjust it when this-adjusting.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      /* Remaining arguments are forwarded unchanged.  */
      for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  /* Result-adjusting thunk: fix up the returned pointer.  */
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					fold_convert (TREE_TYPE (restmp),
						      integer_zero_node),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* NULL came back: return NULL unadjusted.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
								integer_zero_node));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
	/* No result adjustment needed; the call can be a tail call.  */
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* The thunk is no longer an alias — it has a real body now.  */
      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      mark_decl_referenced (thunk_fndecl);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1610
/* Expand function specified by NODE: generate assembly for its body,
   emit its same-body aliases and thunks, then release the body and the
   call edges that pointed into it.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Seek to the tail of the same-body list...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* Walk aliases in the order they were created; it is possible that
	 thunks refers to the aliases made earlier.  */
      for (; alias; alias = next)
	{
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
    }
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1660
b0cdf642 1661/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1662
1663bool
326a9581 1664cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1665{
b0cdf642 1666 *reason = e->inline_failed;
1667 return !e->inline_failed;
d7c6d889 1668}
b0cdf642 1669
acc70efa 1670
acc70efa 1671
d9d9733a 1672/* Expand all functions that must be output.
1673
d7c6d889 1674 Attempt to topologically sort the nodes so function is output when
1675 all called functions are already assembled to allow data to be
91c82c20 1676 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1677 between a function and its callees (later we may choose to use a more
d7c6d889 1678 sophisticated algorithm for function reordering; we will likely want
1679 to use subsections to make the output functions appear in top-down
1680 order). */
1681
1682static void
a6868229 1683cgraph_expand_all_functions (void)
d7c6d889 1684{
1685 struct cgraph_node *node;
4c36ffe6 1686 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1687 int order_pos, new_order_pos = 0;
d7c6d889 1688 int i;
1689
d7c6d889 1690 order_pos = cgraph_postorder (order);
cc636d56 1691 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1692
7bd28bba 1693 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1694 optimization. So we must be sure to not reference them. */
1695 for (i = 0; i < order_pos; i++)
09fc9532 1696 if (order[i]->process)
b0cdf642 1697 order[new_order_pos++] = order[i];
1698
1699 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1700 {
1701 node = order[i];
09fc9532 1702 if (node->process)
d7c6d889 1703 {
cc636d56 1704 gcc_assert (node->reachable);
09fc9532 1705 node->process = 0;
d7c6d889 1706 cgraph_expand_function (node);
1707 }
1708 }
523c1122 1709 cgraph_process_new_functions ();
773c5ba7 1710
d7c6d889 1711 free (order);
773c5ba7 1712
d7c6d889 1713}
1714
56af936e 1715/* This is used to sort the node types by the cgraph order number. */
1716
/* Kind tag for cgraph_order_sort: selects which union member below
   describes the toplevel entity occupying a given order slot.  */
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any entity.  */
  ORDER_FUNCTION,	/* u.f: a function (cgraph) node.  */
  ORDER_VAR,		/* u.v: a variable (varpool) node.  */
  ORDER_ASM		/* u.a: a toplevel asm statement.  */
};
1724
/* One slot of the source-order output table used by
   cgraph_output_in_order; KIND says which union member is valid.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1735
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Order numbers are globally unique, so a flat table indexed by
     order suffices; unused slots stay ORDER_UNDEFINED (zero).  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* With -fno-toplevel-reorder we output all statics; mark them all
     as needed so the emission loop below assembles them.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1822
b0cdf642 1823/* Return true when function body of DECL still needs to be kept around
1824 for later re-use. */
1825bool
1826cgraph_preserve_function_body_p (tree decl)
1827{
1828 struct cgraph_node *node;
8d8c4c8d 1829
1830 gcc_assert (cgraph_global_info_ready);
b0cdf642 1831 /* Look if there is any clone around. */
ccf4ab6b 1832 node = cgraph_node (decl);
1833 if (node->clones)
1834 return true;
b0cdf642 1835 return false;
1836}
1837
/* Run the interprocedural (IPA) pass pipeline: small IPA passes,
   summary generation, LTO stream-out, and the regular IPA passes,
   with plugin callbacks around the whole sequence.  The in_lto_p /
   flag_ltrans checks select which stages apply in an LTO read-back
   versus a normal compilation.  */
static void
ipa_passes (void)
{
  /* IPA passes run with no function context.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }
  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1880
34e5cced 1881
/* Perform simple optimizations based on callgraph.

   This is the top-level driver of the interprocedural (IPA) phase of
   compilation: it analyzes pending varpool decls, runs the IPA passes,
   removes unreachable nodes, materializes clones and finally expands and
   assembles every function.  Moves cgraph_state from CGRAPH_STATE_IPA
   through CGRAPH_STATE_EXPANSION to CGRAPH_STATE_FINISHED.  The statement
   order here is load-bearing: each stage depends on the state set up by
   the previous one.  */

void
cgraph_optimize (void)
{
  /* Bail out early if the front end already diagnosed problems;
     IPA on broken trees would only produce cascading errors.  */
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    {
      /* Pop the timevar we pushed above so the timevar stack stays
	 balanced on this early-return path.  */
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Turn virtual clones into real function bodies before deciding what
     to output.  */
  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    /* -fno-toplevel-reorder: emit functions, variables and asms in the
       original source order.  */
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  /* Expansion may have created new functions (e.g. by pass machinery);
     process them before declaring the unit finished.  */
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
34e5cced 1989
1990
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.

   Builds a fresh artificial FUNCTION_DECL around BODY, gimplifies it
   and registers it with the callgraph as a needed function.  Leaves
   cfun/current_function_decl cleared on return.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* Per-process counter so repeated calls produce distinct names.  */
  static int counter = 0;
  /* NOTE(review): 16 bytes holds "X_NNNNN_C" for 5-digit priorities and
     small counters; presumably priority stays within init-priority range —
     TODO confirm there is no overflow for extreme values.  */
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* The cdtor takes no arguments and returns void.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  if (!targetm.have_ctors_dtors)
    {
      /* Without target ctor/dtor support, collect2 locates these
	 functions by name, so they must be visible and preserved.  */
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Record the decl in the appropriate priority map and mark it as a
     static ctor or dtor.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the new function to the callgraph and make sure it is output.  */
  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}
121f3051 2059
2060void
2061init_cgraph (void)
2062{
2063 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2064}
b5d36404 2065
a0c938f0 2066/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2067 fixed by cgraph_function_versioning (), now the call_expr in their
2068 respective tree code should be updated to call the NEW_VERSION. */
2069
2070static void
2071update_call_expr (struct cgraph_node *new_version)
2072{
2073 struct cgraph_edge *e;
2074
2075 gcc_assert (new_version);
75a70cf9 2076
2077 /* Update the call expr on the edges to call the new version. */
b5d36404 2078 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2079 {
2080 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2081 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2082 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2083 }
b5d36404 2084}
2085
2086
2087/* Create a new cgraph node which is the new version of
2088 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2089 edges which should be redirected to point to
2090 NEW_VERSION. ALL the callees edges of OLD_VERSION
2091 are cloned to the new version node. Return the new
2092 version node. */
2093
2094static struct cgraph_node *
2095cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2096 tree new_decl,
2097 VEC(cgraph_edge_p,heap) *redirect_callers)
b5d36404 2098 {
2099 struct cgraph_node *new_version;
32936803 2100 struct cgraph_edge *e;
b5d36404 2101 struct cgraph_edge *next_callee;
2102 unsigned i;
2103
2104 gcc_assert (old_version);
a0c938f0 2105
b5d36404 2106 new_version = cgraph_node (new_decl);
2107
2108 new_version->analyzed = true;
2109 new_version->local = old_version->local;
2110 new_version->global = old_version->global;
2111 new_version->rtl = new_version->rtl;
2112 new_version->reachable = true;
2113 new_version->count = old_version->count;
2114
2115 /* Clone the old node callees. Recursive calls are
2116 also cloned. */
2117 for (e = old_version->callees;e; e=e->next_callee)
2118 {
32936803 2119 cgraph_clone_edge (e, new_version, e->call_stmt,
2120 e->lto_stmt_uid, REG_BR_PROB_BASE,
2121 CGRAPH_FREQ_BASE,
2122 e->loop_nest, true);
b5d36404 2123 }
2124 /* Fix recursive calls.
2125 If OLD_VERSION has a recursive call after the
2126 previous edge cloning, the new version will have an edge
2127 pointing to the old version, which is wrong;
2128 Redirect it to point to the new version. */
2129 for (e = new_version->callees ; e; e = next_callee)
2130 {
2131 next_callee = e->next_callee;
2132 if (e->callee == old_version)
2133 cgraph_redirect_edge_callee (e, new_version);
a0c938f0 2134
b5d36404 2135 if (!next_callee)
2136 break;
2137 }
4460a647 2138 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2139 {
2140 /* Redirect calls to the old version node to point to its new
2141 version. */
2142 cgraph_redirect_edge_callee (e, new_version);
2143 }
b5d36404 2144
2145 return new_version;
2146 }
2147
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   ARGS_TO_SKIP lists arguments to be omitted from the new version's
   declaration (may be NULL).

   Returns the new version's cgraph node, or NULL if OLD_VERSION_NODE's
   decl is not versionable.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    /* Build a declaration whose parameter list omits ARGS_TO_SKIP.  */
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Let IPA passes know a new function has appeared.  */
  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
469679ab 2209
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.

   Takes the master node NODE, promotes its first clone to own a private
   copy of the function decl, re-parents all remaining sibling clones
   under that first clone, and copies NODE's body into the first clone.
   Returns the first clone, which now acts as the preserved "saved body".  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give the first clone its own decl so it can be looked up
     independently of NODE.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent all of NODE's other clones under FIRST_CLONE: walk to
	 the last sibling, pointing each (including the last, handled by
	 the assignment after the loop) at FIRST_CLONE as its clone_of.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      /* Splice the sibling list onto the front of FIRST_CLONE's own
	 clone list and detach FIRST_CLONE from its former siblings.  */
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Pre-order walk of FIRST_CLONE's whole clone subtree (descend into
     clones, then next sibling, then climb back up), updating each
     clone's decl to the new private decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  /* The saved body is a purely local, non-COMDAT entity.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  /* Pending IPA transforms applied to NODE must not be re-applied to
     the saved copy.  */
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
a861fe52 2276
/* Given virtual clone, turn it into actual clone: copy the body from the
   clone's origin (applying the recorded tree replacements and skipped
   arguments) and detach the node from the clone tree.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone: unlink NODE from its parent's
     doubly-linked sibling-clone list.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin was kept alive only to serve as a clone source and
     now has no remaining clones, it can be removed entirely.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    cgraph_remove_node (node->clone_of);
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2306
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.

   Returns the (possibly new) call statement.  When the callee has
   combined_args_to_skip, a fresh call statement without those arguments
   is built and replaces the old one in the caller's body.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;

  /* Nothing to do for indirect calls or calls already targeting the
     callee's decl.  */
  if (!decl || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
    return e->call_stmt;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }

  /* Build a reduced-argument copy of the call when the callee clone
     dropped some parameters; otherwise edit the statement in place.  */
  if (e->callee->clone.combined_args_to_skip)
    new_stmt = gimple_call_copy_skip_args (e->call_stmt,
					   e->callee->clone.combined_args_to_skip);
  else
    new_stmt = e->call_stmt;
  /* Keep the virtual-operand SSA web consistent: the new statement must
     become the defining statement of its VDEF.  */
  if (gimple_vdef (new_stmt)
      && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
  gimple_call_set_fndecl (new_stmt, e->callee->decl);

  gsi = gsi_for_stmt (e->call_stmt);
  gsi_replace (&gsi, new_stmt, true);
  update_stmt (new_stmt);

  /* Update EH information too, just in case.  */
  maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);

  /* Propagate the statement replacement to all clones sharing this
     call site.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2356
ccf4ab6b 2357/* Once all functions from compilation unit are in memory, produce all clones
c596d830 2358 and update all calls. We might also do this on demand if we don't want to
2359 bring all functions to memory prior compilation, but current WHOPR
2360 implementation does that and it is is bit easier to keep everything right in
2361 this order. */
ccf4ab6b 2362void
2363cgraph_materialize_all_clones (void)
2364{
2365 struct cgraph_node *node;
2366 bool stabilized = false;
2367
2368 if (cgraph_dump_file)
2369 fprintf (cgraph_dump_file, "Materializing clones\n");
2370#ifdef ENABLE_CHECKING
2371 verify_cgraph ();
2372#endif
2373
2374 /* We can also do topological order, but number of iterations should be
2375 bounded by number of IPA passes since single IPA pass is probably not
2376 going to create clones of clones it created itself. */
2377 while (!stabilized)
2378 {
2379 stabilized = true;
2380 for (node = cgraph_nodes; node; node = node->next)
2381 {
2382 if (node->clone_of && node->decl != node->clone_of->decl
2383 && !gimple_has_body_p (node->decl))
2384 {
2385 if (gimple_has_body_p (node->clone_of->decl))
2386 {
2387 if (cgraph_dump_file)
e20422ea 2388 {
2389 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2390 cgraph_node_name (node->clone_of),
2391 cgraph_node_name (node));
2392 if (node->clone.tree_map)
2393 {
2394 unsigned int i;
2395 fprintf (cgraph_dump_file, " replace map: ");
2396 for (i = 0; i < VEC_length (ipa_replace_map_p,
2397 node->clone.tree_map);
2398 i++)
2399 {
2400 struct ipa_replace_map *replace_info;
2401 replace_info = VEC_index (ipa_replace_map_p,
2402 node->clone.tree_map,
2403 i);
2404 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2405 fprintf (cgraph_dump_file, " -> ");
2406 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2407 fprintf (cgraph_dump_file, "%s%s;",
2408 replace_info->replace_p ? "(replace)":"",
2409 replace_info->ref_p ? "(ref)":"");
2410 }
2411 fprintf (cgraph_dump_file, "\n");
2412 }
2413 if (node->clone.args_to_skip)
2414 {
2415 fprintf (cgraph_dump_file, " args_to_skip: ");
2416 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2417 }
2418 if (node->clone.args_to_skip)
2419 {
2420 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2421 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2422 }
2423 }
ccf4ab6b 2424 cgraph_materialize_clone (node);
2425 }
2426 else
2427 stabilized = false;
2428 }
2429 }
2430 }
ee3f5fc0 2431 for (node = cgraph_nodes; node; node = node->next)
2432 if (!node->analyzed && node->callees)
2433 cgraph_node_remove_callees (node);
ccf4ab6b 2434 if (cgraph_dump_file)
2435 fprintf (cgraph_dump_file, "Updating call sites\n");
2436 for (node = cgraph_nodes; node; node = node->next)
c596d830 2437 if (node->analyzed && !node->clone_of
2438 && gimple_has_body_p (node->decl))
ccf4ab6b 2439 {
2440 struct cgraph_edge *e;
2441
2442 current_function_decl = node->decl;
2443 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2444 for (e = node->callees; e; e = e->next_callee)
c596d830 2445 cgraph_redirect_edge_call_stmt_to_callee (e);
e32916b6 2446 gcc_assert (!need_ssa_update_p (cfun));
ccf4ab6b 2447 pop_cfun ();
2448 current_function_decl = NULL;
2449#ifdef ENABLE_CHECKING
2450 verify_cgraph_node (node);
2451#endif
2452 }
c596d830 2453 if (cgraph_dump_file)
2454 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2455 /* All changes to parameters have been performed. In order not to
2456 incorrectly repeat them, we simply dispose of the bitmaps that drive the
2457 changes. */
2458 for (node = cgraph_nodes; node; node = node->next)
2459 node->clone.combined_args_to_skip = NULL;
947781ac 2460#ifdef ENABLE_CHECKING
2461 verify_cgraph ();
2462#endif
ccf4ab6b 2463 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2464}
2465
a861fe52 2466#include "gt-cgraphunit.h"