cd6bca02 1/* Callgraph based interprocedural optimizations.
cfaf579d 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well
cd6bca02 23 as a few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface between the
26 tree based frontends and the backend (and middle end).
27
28 The front-end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front-end has parsed the whole function
33 body and it is certain that neither the body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 functions.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is
46 finalized and it will no longer change.
b0cdf642 47
6329636b 48 Here the call-graph construction and local function
b0cdf642 49 analysis take place. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C frontend).
b0cdf642 54
55 - cgraph_optimize
56
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. The backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On tree-ssa, genericizing should take place here and we would avoid
78 the need for these hooks (replacing them by a genericizing hook).
79
6329636b 80 Analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed are output to the assembly file. Via
88 the mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable and thus
94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
121f3051 107
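/* Illustrative sketch only (not part of this file's interface; the loop
   below is hypothetical front-end pseudo-code): a tree based front end
   is expected to drive the functions described above roughly like

     for each fully parsed function FNDECL:
       cgraph_finalize_function (FNDECL, false);
     ...
     cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit analyzes the reachable functions and
   then calls cgraph_optimize, which runs the IPA passes and expands the
   functions that must be output.  */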
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
f79b6507 126#include "timevar.h"
d7c6d889 127#include "params.h"
128#include "fibheap.h"
611e5405 129#include "intl.h"
b69eb0ff 130#include "function.h"
b5d36404 131#include "ipa-prop.h"
75a70cf9 132#include "gimple.h"
133#include "tree-iterator.h"
f1e2a033 134#include "tree-pass.h"
bfec3452 135#include "tree-dump.h"
c1dcd13c 136#include "output.h"
9ed5b1f5 137#include "coverage.h"
d7c6d889 138
a6868229 139static void cgraph_expand_all_functions (void);
d9d9733a 140static void cgraph_mark_functions_to_output (void);
141static void cgraph_expand_function (struct cgraph_node *);
f788fff2 142static void cgraph_output_pending_asms (void);
bfec3452 143static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 144
121f3051 145static FILE *cgraph_dump_file;
146
2c56f72e 147/* A vector of FUNCTION_DECLs declared as static constructors. */
148static GTY (()) VEC(tree, gc) *static_ctors;
149/* A vector of FUNCTION_DECLs declared as static destructors. */
150static GTY (()) VEC(tree, gc) *static_dtors;
a861fe52 151
152/* When the target does not have ctors and dtors, we call all constructors
310d2511 153 and destructors from a special initialization/destruction function
a861fe52 154 recognized by collect2.
155
156 When we are going to build this function, collect all constructors and
157 destructors and turn them into normal functions. */
158
159static void
160record_cdtor_fn (tree fndecl)
161{
2de29097 162 struct cgraph_node *node;
163 if (targetm.have_ctors_dtors
164 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
165 && !DECL_STATIC_DESTRUCTOR (fndecl)))
a861fe52 166 return;
167
168 if (DECL_STATIC_CONSTRUCTOR (fndecl))
169 {
2c56f72e 170 VEC_safe_push (tree, gc, static_ctors, fndecl);
a861fe52 171 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
a861fe52 172 }
173 if (DECL_STATIC_DESTRUCTOR (fndecl))
174 {
2c56f72e 175 VEC_safe_push (tree, gc, static_dtors, fndecl);
a861fe52 176 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
a861fe52 177 }
2de29097 178 node = cgraph_node (fndecl);
179 node->local.disregard_inline_limits = 1;
180 cgraph_mark_reachable_node (node);
a861fe52 181}
182
2c56f72e 183/* Define global constructor/destructor functions for the CDTORS, of
184 which there are LEN. The CDTORS are sorted by initialization
185 priority. If CTOR_P is true, these are constructors; otherwise,
186 they are destructors. */
187
a861fe52 188static void
2c56f72e 189build_cdtor (bool ctor_p, tree *cdtors, size_t len)
a861fe52 190{
2c56f72e 191 size_t i;
a861fe52 192
2c56f72e 193 i = 0;
194 while (i < len)
195 {
196 tree body;
197 tree fn;
198 priority_type priority;
199
200 priority = 0;
201 body = NULL_TREE;
202 /* Find the next batch of constructors/destructors with the same
203 initialization priority. */
204 do
205 {
206 priority_type p;
207 fn = cdtors[i];
208 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
209 if (!body)
210 priority = p;
211 else if (p != priority)
212 break;
389dd41b 213 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
214 fn, 0),
2c56f72e 215 &body);
216 ++i;
217 }
218 while (i < len);
219 gcc_assert (body != NULL_TREE);
220 /* Generate a function to call all the functions of like
221 priority. */
222 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
223 }
224}
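
/* Illustrative example (assumed input, not actual data): if CDTORS holds
   three constructors already sorted by priority, say with priorities
   100, 100 and 65535, the loop above emits two synthesized functions:
   one of priority 100 calling the first two constructors in source
   order, and one of priority 65535 calling the third.  */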
225
226/* Comparison function for qsort. P1 and P2 are actually of type
227 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
228 used to determine the sort order. */
a861fe52 229
2c56f72e 230static int
231compare_ctor (const void *p1, const void *p2)
232{
233 tree f1;
234 tree f2;
235 int priority1;
236 int priority2;
237
238 f1 = *(const tree *)p1;
239 f2 = *(const tree *)p2;
240 priority1 = DECL_INIT_PRIORITY (f1);
241 priority2 = DECL_INIT_PRIORITY (f2);
242
243 if (priority1 < priority2)
244 return -1;
245 else if (priority1 > priority2)
246 return 1;
247 else
248 /* Ensure a stable sort. */
249 return (const tree *)p1 - (const tree *)p2;
250}
251
252/* Comparison function for qsort. P1 and P2 are actually of type
253 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
254 used to determine the sort order. */
a861fe52 255
2c56f72e 256static int
257compare_dtor (const void *p1, const void *p2)
258{
259 tree f1;
260 tree f2;
261 int priority1;
262 int priority2;
263
264 f1 = *(const tree *)p1;
265 f2 = *(const tree *)p2;
266 priority1 = DECL_FINI_PRIORITY (f1);
267 priority2 = DECL_FINI_PRIORITY (f2);
268
269 if (priority1 < priority2)
270 return -1;
271 else if (priority1 > priority2)
272 return 1;
273 else
274 /* Ensure a stable sort. */
275 return (const tree *)p1 - (const tree *)p2;
a861fe52 276}
277
278/* Generate functions to call static constructors and destructors
279 for targets that do not support .ctors/.dtors sections. These
280 functions have magic names which are detected by collect2. */
281
282static void
283cgraph_build_cdtor_fns (void)
284{
2c56f72e 285 if (!VEC_empty (tree, static_ctors))
a861fe52 286 {
2c56f72e 287 gcc_assert (!targetm.have_ctors_dtors);
288 qsort (VEC_address (tree, static_ctors),
289 VEC_length (tree, static_ctors),
290 sizeof (tree),
291 compare_ctor);
292 build_cdtor (/*ctor_p=*/true,
293 VEC_address (tree, static_ctors),
294 VEC_length (tree, static_ctors));
295 VEC_truncate (tree, static_ctors, 0);
a861fe52 296 }
2c56f72e 297
298 if (!VEC_empty (tree, static_dtors))
a861fe52 299 {
2c56f72e 300 gcc_assert (!targetm.have_ctors_dtors);
301 qsort (VEC_address (tree, static_dtors),
302 VEC_length (tree, static_dtors),
303 sizeof (tree),
304 compare_dtor);
305 build_cdtor (/*ctor_p=*/false,
306 VEC_address (tree, static_dtors),
307 VEC_length (tree, static_dtors));
308 VEC_truncate (tree, static_dtors, 0);
a861fe52 309 }
310}
311
2c0b522d 312/* Determine if function DECL is needed. That is, visible to something
313 either outside this translation unit or something magic in the system
6329636b 314 configury. */
2c0b522d 315
7bfefa9d 316bool
317cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 318{
62eec3b4 319 if (MAIN_NAME_P (DECL_NAME (decl))
320 && TREE_PUBLIC (decl))
321 {
322 node->local.externally_visible = true;
323 return true;
324 }
4ee9c684 325
3f82b628 326 /* If the user told us it is used, then it must be so. */
05806473 327 if (node->local.externally_visible)
328 return true;
329
3f82b628 330 /* ??? If the assembler name is set by hand, it is possible to assemble
331 the name later after finalizing the function and the fact is noticed
332 in assemble_name then. This is arguably a bug. */
333 if (DECL_ASSEMBLER_NAME_SET_P (decl)
334 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
335 return true;
336
55680bef 337 /* With -fkeep-inline-functions we are keeping all inline functions except
338 for extern inline ones. */
339 if (flag_keep_inline_functions
340 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 341 && !DECL_EXTERNAL (decl)
342 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 343 return true;
344
2c0b522d 345 /* If we decided it was needed before, but at the time we didn't have
346 the body of the function available, then it's still needed. We have
347 to go back and re-check its dependencies now. */
348 if (node->needed)
349 return true;
350
351 /* Externally visible functions must be output. The exception is
a0c938f0 352 COMDAT functions that must be output only when they are needed.
8baa9d15 353
354 When not optimizing, also output the static functions. (see
95da6220 355 PR24561), but don't do so for always_inline functions, functions
d3d410e1 356 declared inline and nested functions. These were optimized out
357 in the original implementation and it is unclear whether we want
554f2707 358 to change the behavior here. */
bba7ddf8 359 if (((TREE_PUBLIC (decl)
d3d410e1 360 || (!optimize && !node->local.disregard_inline_limits
361 && !DECL_DECLARED_INLINE_P (decl)
362 && !node->origin))
bba7ddf8 363 && !flag_whole_program)
62eec3b4 364 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 365 return true;
366
367 /* Constructors and destructors are reachable from the runtime by
368 some mechanism. */
369 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
370 return true;
371
2c0b522d 372 return false;
373}
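
/* For illustration only (hypothetical translation unit):

     static void helper (void) { ... }
     void api (void) { helper (); }
     static __attribute__((constructor)) void init (void) { ... }

   "api" is needed because it is TREE_PUBLIC and neither DECL_COMDAT nor
   DECL_EXTERNAL, "init" is needed because it is a static constructor,
   while "helper" is not needed by itself and only becomes reachable
   through the call edge from "api".  */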
374
bdc40eb8 375/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
523c1122 376 functions into the callgraph in a way so they look like ordinary reachable
377 functions inserted into the callgraph already at construction time. */
378
379bool
380cgraph_process_new_functions (void)
381{
382 bool output = false;
383 tree fndecl;
384 struct cgraph_node *node;
385
386 /* Note that this queue may grow as it is being processed, as the new
387 functions may generate new ones. */
388 while (cgraph_new_nodes)
389 {
390 node = cgraph_new_nodes;
391 fndecl = node->decl;
392 cgraph_new_nodes = cgraph_new_nodes->next_needed;
393 switch (cgraph_state)
394 {
395 case CGRAPH_STATE_CONSTRUCTION:
396 /* At construction time we just need to finalize the function and move
397 it into the reachable functions list. */
398
399 node->next_needed = NULL;
400 cgraph_finalize_function (fndecl, false);
401 cgraph_mark_reachable_node (node);
402 output = true;
403 break;
404
405 case CGRAPH_STATE_IPA:
f517b36e 406 case CGRAPH_STATE_IPA_SSA:
523c1122 407 /* When IPA optimization has already started, do all essential
408 transformations that have already been performed on the whole
409 cgraph but not on this function. */
410
75a70cf9 411 gimple_register_cfg_hooks ();
523c1122 412 if (!node->analyzed)
413 cgraph_analyze_function (node);
414 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
415 current_function_decl = fndecl;
9c1bff7a 416 compute_inline_parameters (node);
f517b36e 417 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
418 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
419 /* When not optimizing, be sure we run early local passes anyway
420 to expand OMP. */
421 || !optimize)
20099e35 422 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 423 free_dominance_info (CDI_POST_DOMINATORS);
424 free_dominance_info (CDI_DOMINATORS);
425 pop_cfun ();
426 current_function_decl = NULL;
427 break;
428
429 case CGRAPH_STATE_EXPANSION:
430 /* Functions created during expansion shall be compiled
431 directly. */
09fc9532 432 node->process = 0;
523c1122 433 cgraph_expand_function (node);
434 break;
435
436 default:
437 gcc_unreachable ();
438 break;
439 }
50828ed8 440 cgraph_call_function_insertion_hooks (node);
523c1122 441 }
442 return output;
443}
444
9b8fb23a 445/* As a GCC extension we allow redefinition of the function. The
446 semantics when the two bodies differ are not well defined.
447 We replace the old body with the new body so in unit-at-a-time mode
448 we always use the new body, while in normal mode we may end up with the
449 old body inlined into some functions and the new body expanded and
450 inlined in others.
451
452 ??? It may make more sense to use one body for inlining and the other
453 body for expanding the function but this is difficult to do. */
454
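/* For example (GNU C extern inline, illustrative only):

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   When the second definition is finalized, cgraph_reset_node is run on
   f's node so the new body replaces the old one before further
   analysis.  */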
455static void
456cgraph_reset_node (struct cgraph_node *node)
457{
09fc9532 458 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 459 This is *not* testing for whether we've already emitted the function.
460 That case can be sort-of legitimately seen with real function redefinition
461 errors. I would argue that the front end should never present us with
462 such a case, but don't enforce that for now. */
09fc9532 463 gcc_assert (!node->process);
9b8fb23a 464
465 /* Reset our data structures so we can analyze the function again. */
466 memset (&node->local, 0, sizeof (node->local));
467 memset (&node->global, 0, sizeof (node->global));
468 memset (&node->rtl, 0, sizeof (node->rtl));
469 node->analyzed = false;
470 node->local.redefined_extern_inline = true;
471 node->local.finalized = false;
472
9b8fb23a 473 cgraph_node_remove_callees (node);
474
475 /* We may need to re-queue the node for assembling in case
46beef9a 476 we already processed it and ignored it as not needed or got
477 a re-declaration in IMA mode. */
478 if (node->reachable)
9b8fb23a 479 {
480 struct cgraph_node *n;
481
482 for (n = cgraph_nodes_queue; n; n = n->next_needed)
483 if (n == node)
484 break;
485 if (!n)
486 node->reachable = 0;
487 }
488}
c08871a9 489
1e8e9920 490static void
491cgraph_lower_function (struct cgraph_node *node)
492{
493 if (node->lowered)
494 return;
bfec3452 495
496 if (node->nested)
497 lower_nested_functions (node->decl);
498 gcc_assert (!node->nested);
499
1e8e9920 500 tree_lowering_passes (node->decl);
501 node->lowered = true;
502}
503
28df663b 504/* DECL has been parsed. Take it, queue it, compile it at the whim of the
505 logic in effect. If NESTED is true, then our caller cannot stand to have
506 the garbage collector run at the moment. We would need to either create
507 a new GC context, or just not compile right now. */
ae01b312 508
509void
28df663b 510cgraph_finalize_function (tree decl, bool nested)
ae01b312 511{
512 struct cgraph_node *node = cgraph_node (decl);
513
c08871a9 514 if (node->local.finalized)
9b8fb23a 515 cgraph_reset_node (node);
28df663b 516
167b550b 517 node->pid = cgraph_max_pid ++;
c08871a9 518 notice_global_symbol (decl);
79bb87b4 519 node->local.finalized = true;
e27482aa 520 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 521 node->finalized_by_frontend = true;
a861fe52 522 record_cdtor_fn (node->decl);
ae01b312 523
7bfefa9d 524 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 525 cgraph_mark_needed_node (node);
526
ecda6e51 527 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 528 level unit, we need to be conservative about possible entry points
529 there. */
62eec3b4 530 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
3f82b628 531 cgraph_mark_reachable_node (node);
532
2c0b522d 533 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 534 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 535 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 536
b69eb0ff 537 /* Possibly warn about unused parameters. */
538 if (warn_unused_parameter)
539 do_warn_unused_parameter (decl);
6329636b 540
541 if (!nested)
542 ggc_collect ();
ae01b312 543}
544
0da03d11 545/* C99 extern inline keywords allow changing the declaration after the function
546 has been finalized. We need to re-decide if we want to mark the function as
547 needed then. */
548
549void
550cgraph_mark_if_needed (tree decl)
551{
552 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 553 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 554 cgraph_mark_needed_node (node);
555}
556
ccf4ab6b 557/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
558static bool
559clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
560{
561 while (node != node2 && node2)
562 node2 = node2->clone_of;
563 return node2 != NULL;
564}
565
b0cdf642 566/* Verify the consistency of the given cgraph node. */
567void
568verify_cgraph_node (struct cgraph_node *node)
569{
570 struct cgraph_edge *e;
e27482aa 571 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
117ef3d7 572 struct function *saved_cfun = cfun;
e27482aa 573 basic_block this_block;
75a70cf9 574 gimple_stmt_iterator gsi;
9bfec7c2 575 bool error_found = false;
b0cdf642 576
bd09cd3e 577 if (errorcount || sorrycount)
578 return;
579
b0cdf642 580 timevar_push (TV_CGRAPH_VERIFY);
117ef3d7 581 /* debug_generic_stmt needs a correct cfun. */
582 set_cfun (this_cfun);
b0cdf642 583 for (e = node->callees; e; e = e->next_callee)
584 if (e->aux)
585 {
0a81f5a0 586 error ("aux field set for edge %s->%s",
abd3e6b5 587 identifier_to_locale (cgraph_node_name (e->caller)),
588 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 589 error_found = true;
590 }
a2cb9b3b 591 if (node->count < 0)
592 {
593 error ("Execution count is negative");
594 error_found = true;
595 }
b0cdf642 596 for (e = node->callers; e; e = e->next_caller)
597 {
a2cb9b3b 598 if (e->count < 0)
599 {
600 error ("caller edge count is negative");
601 error_found = true;
602 }
4ae20857 603 if (e->frequency < 0)
604 {
605 error ("caller edge frequency is negative");
606 error_found = true;
607 }
608 if (e->frequency > CGRAPH_FREQ_MAX)
609 {
610 error ("caller edge frequency is too large");
611 error_found = true;
612 }
b0cdf642 613 if (!e->inline_failed)
614 {
615 if (node->global.inlined_to
616 != (e->caller->global.inlined_to
617 ? e->caller->global.inlined_to : e->caller))
618 {
0a81f5a0 619 error ("inlined_to pointer is wrong");
b0cdf642 620 error_found = true;
621 }
622 if (node->callers->next_caller)
623 {
0a81f5a0 624 error ("multiple inline callers");
b0cdf642 625 error_found = true;
626 }
627 }
628 else
629 if (node->global.inlined_to)
630 {
0a81f5a0 631 error ("inlined_to pointer set for noninline callers");
b0cdf642 632 error_found = true;
633 }
634 }
635 if (!node->callers && node->global.inlined_to)
636 {
5cd75817 637 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 638 error_found = true;
639 }
640 if (node->global.inlined_to == node)
641 {
0a81f5a0 642 error ("inlined_to pointer refers to itself");
b0cdf642 643 error_found = true;
644 }
645
0f6439b9 646 if (!cgraph_node (node->decl))
b0cdf642 647 {
0f6439b9 648 error ("node not found in cgraph_hash");
b0cdf642 649 error_found = true;
650 }
a0c938f0 651
ccf4ab6b 652 if (node->clone_of)
653 {
654 struct cgraph_node *n;
655 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
656 if (n == node)
657 break;
658 if (!n)
659 {
660 error ("node has wrong clone_of");
661 error_found = true;
662 }
663 }
664 if (node->clones)
665 {
666 struct cgraph_node *n;
667 for (n = node->clones; n; n = n->next_sibling_clone)
668 if (n->clone_of != node)
669 break;
670 if (n)
671 {
672 error ("node has wrong clone list");
673 error_found = true;
674 }
675 }
676 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
677 {
678 error ("node is in clone list but it is not clone");
679 error_found = true;
680 }
681 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
682 {
683 error ("node has wrong prev_clone pointer");
684 error_found = true;
685 }
686 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
687 {
688 error ("double linked list of clones corrupted");
689 error_found = true;
690 }
691
692 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 693 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 694 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
695 && !flag_wpa)
b0cdf642 696 {
e27482aa 697 if (this_cfun->cfg)
698 {
699 /* The nodes we're interested in are never shared, so walk
700 the tree ignoring duplicates. */
e7c352d1 701 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 702 /* Reach the trees by walking over the CFG, and note the
703 enclosing basic-blocks in the call edges. */
704 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 705 for (gsi = gsi_start_bb (this_block);
706 !gsi_end_p (gsi);
707 gsi_next (&gsi))
9bfec7c2 708 {
75a70cf9 709 gimple stmt = gsi_stmt (gsi);
9bfec7c2 710 tree decl;
75a70cf9 711 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
9bfec7c2 712 {
713 struct cgraph_edge *e = cgraph_edge (node, stmt);
714 if (e)
715 {
716 if (e->aux)
717 {
0a81f5a0 718 error ("shared call_stmt:");
75a70cf9 719 debug_gimple_stmt (stmt);
9bfec7c2 720 error_found = true;
721 }
ccf4ab6b 722 if (!clone_of_p (cgraph_node (decl), e->callee)
723 && !e->callee->global.inlined_to)
9bfec7c2 724 {
0a81f5a0 725 error ("edge points to wrong declaration:");
9bfec7c2 726 debug_tree (e->callee->decl);
727 fprintf (stderr," Instead of:");
728 debug_tree (decl);
729 }
730 e->aux = (void *)1;
731 }
732 else
733 {
0a81f5a0 734 error ("missing callgraph edge for call stmt:");
75a70cf9 735 debug_gimple_stmt (stmt);
9bfec7c2 736 error_found = true;
737 }
738 }
739 }
e27482aa 740 pointer_set_destroy (visited_nodes);
e27482aa 741 }
742 else
743 /* No CFG available?! */
744 gcc_unreachable ();
745
b0cdf642 746 for (e = node->callees; e; e = e->next_callee)
747 {
f8daee9b 748 if (!e->aux && !e->indirect_call)
b0cdf642 749 {
0a81f5a0 750 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 751 identifier_to_locale (cgraph_node_name (e->caller)),
752 identifier_to_locale (cgraph_node_name (e->callee)));
75a70cf9 753 debug_gimple_stmt (e->call_stmt);
b0cdf642 754 error_found = true;
755 }
756 e->aux = 0;
757 }
758 }
759 if (error_found)
760 {
761 dump_cgraph_node (stderr, node);
0a81f5a0 762 internal_error ("verify_cgraph_node failed");
b0cdf642 763 }
117ef3d7 764 set_cfun (saved_cfun);
b0cdf642 765 timevar_pop (TV_CGRAPH_VERIFY);
766}
767
768/* Verify whole cgraph structure. */
769void
770verify_cgraph (void)
771{
772 struct cgraph_node *node;
773
8ec2a798 774 if (sorrycount || errorcount)
775 return;
776
b0cdf642 777 for (node = cgraph_nodes; node; node = node->next)
778 verify_cgraph_node (node);
779}
780
56af936e 781/* Output all asm statements we have stored up to be output. */
782
783static void
784cgraph_output_pending_asms (void)
785{
786 struct cgraph_asm_node *can;
787
788 if (errorcount || sorrycount)
789 return;
790
791 for (can = cgraph_asm_nodes; can; can = can->next)
792 assemble_asm (can->asm_str);
793 cgraph_asm_nodes = NULL;
794}
795
0785e435 796/* Analyze the function scheduled to be output. */
bfec3452 797static void
0785e435 798cgraph_analyze_function (struct cgraph_node *node)
799{
bfec3452 800 tree save = current_function_decl;
0785e435 801 tree decl = node->decl;
802
ec1e35b2 803 current_function_decl = decl;
e27482aa 804 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 805
806 /* Make sure to gimplify bodies only once. During analysis of a
807 function we lower it, which will require gimplified nested
808 functions, so we can end up here with an already gimplified
809 body. */
810 if (!gimple_body (decl))
811 gimplify_function_tree (decl);
812 dump_function (TDI_generic, decl);
813
e27482aa 814 cgraph_lower_function (node);
6e8d6e86 815 node->analyzed = true;
0785e435 816
e27482aa 817 pop_cfun ();
bfec3452 818 current_function_decl = save;
0785e435 819}
820
05806473 821/* Look for externally_visible and used attributes and mark cgraph nodes
822 accordingly.
823
824 We cannot mark the nodes at the point the attributes are processed (in
825 handle_*_attribute) because the copy of the declarations available at that
826 point may not be canonical. For example, in:
827
828 void f();
829 void f() __attribute__((used));
830
831 the declaration we see in handle_used_attribute will be the second
832 declaration -- but the front end will subsequently merge that declaration
833 with the original declaration and discard the second declaration.
834
835 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
836
837 void f() {}
838 void f() __attribute__((externally_visible));
839
840 is valid.
841
842 So, we walk the nodes at the end of the translation unit, applying the
843 attributes at that point. */
844
845static void
846process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 847 struct varpool_node *first_var)
05806473 848{
849 struct cgraph_node *node;
1d416bd7 850 struct varpool_node *vnode;
05806473 851
852 for (node = cgraph_nodes; node != first; node = node->next)
853 {
854 tree decl = node->decl;
855 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
856 {
857 mark_decl_referenced (decl);
858 if (node->local.finalized)
859 cgraph_mark_needed_node (node);
860 }
861 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
862 {
ba12ea31 863 if (! TREE_PUBLIC (node->decl))
712d2297 864 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
865 "%<externally_visible%>"
866 " attribute has effect only on public objects");
ba12ea31 867 else
868 {
869 if (node->local.finalized)
870 cgraph_mark_needed_node (node);
871 node->local.externally_visible = true;
872 }
05806473 873 }
874 }
1d416bd7 875 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 876 {
877 tree decl = vnode->decl;
878 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
879 {
880 mark_decl_referenced (decl);
881 if (vnode->finalized)
1d416bd7 882 varpool_mark_needed_node (vnode);
05806473 883 }
884 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
885 {
ba12ea31 886 if (! TREE_PUBLIC (vnode->decl))
712d2297 887 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
888 "%<externally_visible%>"
889 " attribute has effect only on public objects");
ba12ea31 890 else
891 {
892 if (vnode->finalized)
1d416bd7 893 varpool_mark_needed_node (vnode);
ba12ea31 894 vnode->externally_visible = true;
895 }
05806473 896 }
897 }
898}
899
aeeb194b 900/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
901 each reachable function) and build the cgraph.
902 The function can be called multiple times after inserting new nodes
0d424440 903 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 904
aeeb194b 905static void
906cgraph_analyze_functions (void)
ae01b312 907{
c1dcd13c 908 /* Keep track of already processed nodes when called multiple times for
06b27565 909 intermodule optimization. */
c1dcd13c 910 static struct cgraph_node *first_analyzed;
c17d0de1 911 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 912 static struct varpool_node *first_analyzed_var;
aeeb194b 913 struct cgraph_node *node, *next;
ae01b312 914
c17d0de1 915 process_function_and_variable_attributes (first_processed,
916 first_analyzed_var);
917 first_processed = cgraph_nodes;
1d416bd7 918 first_analyzed_var = varpool_nodes;
919 varpool_analyze_pending_decls ();
f79b6507 920 if (cgraph_dump_file)
ae01b312 921 {
e4200070 922 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 923 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 924 if (node->needed)
f79b6507 925 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
926 fprintf (cgraph_dump_file, "\n");
ae01b312 927 }
aeeb194b 928 cgraph_process_new_functions ();
ae01b312 929
e6d2b2d8 930 /* Propagate the reachability flag and lower the representation of all reachable
931 functions. In the future, lowering will introduce new functions and
932 new entry points on the way (by template instantiation and virtual
933 method table generation for instance). */
3d7bfc56 934 while (cgraph_nodes_queue)
ae01b312 935 {
0785e435 936 struct cgraph_edge *edge;
3d7bfc56 937 tree decl = cgraph_nodes_queue->decl;
938
939 node = cgraph_nodes_queue;
d87976fb 940 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 941 node->next_needed = NULL;
ae01b312 942
638531ad 943 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 944 weak alias attribute to kill its body. See
638531ad 945 gcc.c-torture/compile/20011119-1.c */
75a70cf9 946 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 947 {
948 cgraph_reset_node (node);
949 continue;
950 }
638531ad 951
7bfefa9d 952 if (!node->analyzed)
953 cgraph_analyze_function (node);
2c0b522d 954
ae01b312 955 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 956 if (!edge->callee->reachable)
2c0b522d 957 cgraph_mark_reachable_node (edge->callee);
958
d544ceff 959 /* If decl is a clone of an abstract function, mark that abstract
960 function so that we don't release its body. The DECL_INITIAL() of that
961 abstract function declaration will later be needed to output debug info. */
962 if (DECL_ABSTRACT_ORIGIN (decl))
963 {
964 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
965 origin_node->abstract_and_needed = true;
966 }
967
c17d0de1 968 /* We finalize local static variables while constructing the callgraph
969 edges. Process their attributes too. */
970 process_function_and_variable_attributes (first_processed,
971 first_analyzed_var);
972 first_processed = cgraph_nodes;
1d416bd7 973 first_analyzed_var = varpool_nodes;
974 varpool_analyze_pending_decls ();
aeeb194b 975 cgraph_process_new_functions ();
ae01b312 976 }
2c0b522d 977
aa5e06c7 978 /* Collect entry points to the unit. */
f79b6507 979 if (cgraph_dump_file)
3d7bfc56 980 {
e4200070 981 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 982 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 983 if (node->needed)
f79b6507 984 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 985 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 986 dump_cgraph (cgraph_dump_file);
3d7bfc56 987 }
e6d2b2d8 988
f79b6507 989 if (cgraph_dump_file)
990 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 991
f4ec5ce1 992 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 993 {
994 tree decl = node->decl;
f4ec5ce1 995 next = node->next;
ae01b312 996
1a1a827a 997 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 998 cgraph_reset_node (node);
9b8fb23a 999
1a1a827a 1000 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 1001 {
f79b6507 1002 if (cgraph_dump_file)
1003 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1004 cgraph_remove_node (node);
9b8fb23a 1005 continue;
ae01b312 1006 }
bc5cab3b 1007 else
1008 node->next_needed = NULL;
1a1a827a 1009 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 1010 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1011 }
f79b6507 1012 if (cgraph_dump_file)
e4200070 1013 {
1014 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1015 dump_cgraph (cgraph_dump_file);
1016 }
c1dcd13c 1017 first_analyzed = cgraph_nodes;
ae01b312 1018 ggc_collect ();
aeeb194b 1019}
1020
8f69fd82 1021
1022/* Emit thunks for every node in the cgraph.
1023 FIXME: We really ought to emit thunks only for functions that are needed. */
1024
1025static void
1026cgraph_emit_thunks (void)
1027{
1028 struct cgraph_node *n;
1029
1030 for (n = cgraph_nodes; n; n = n->next)
1031 {
1032 /* Only emit thunks on functions defined in this TU.
1033 Note that this may emit more thunks than strictly necessary.
1034 During optimization some nodes may disappear. It would be
1035 nice to emit thunks only for the functions that will be
1036 emitted, but we cannot know that until the inliner and other
1037 IPA passes have run (see the sequencing of the call to
1038 cgraph_mark_functions_to_output in cgraph_optimize). */
9929334e 1039 if (n->reachable
1040 && !DECL_EXTERNAL (n->decl))
8f69fd82 1041 lang_hooks.callgraph.emit_associated_thunks (n->decl);
1042 }
1043}
1044
1045
aeeb194b 1046/* Analyze the whole compilation unit once it is parsed completely. */
1047
1048void
1049cgraph_finalize_compilation_unit (void)
1050{
9929334e 1051 timevar_push (TV_CGRAPH);
1052
bfec3452 1053 /* Do not skip analyzing the functions if there were errors; otherwise we
1054 miss diagnostics for the following functions. */
aeeb194b 1055
8f69fd82 1056 /* Emit size functions we didn't inline. */
4189e677 1057 finalize_size_functions ();
8f69fd82 1058
8f69fd82 1059 /* Call functions declared with the "constructor" or "destructor"
1060 attribute. */
1061 cgraph_build_cdtor_fns ();
aeeb194b 1062
9929334e 1063 /* Mark alias targets necessary and emit diagnostics. */
1064 finish_aliases_1 ();
1065
aeeb194b 1066 if (!quiet_flag)
1067 {
1068 fprintf (stderr, "\nAnalyzing compilation unit\n");
1069 fflush (stderr);
1070 }
1071
9929334e 1072 /* Gimplify and lower all functions, compute reachability and
1073 remove unreachable nodes. */
1074 cgraph_analyze_functions ();
1075
1076 /* Emit thunks for reachable nodes, if needed. */
1077 if (lang_hooks.callgraph.emit_associated_thunks)
1078 cgraph_emit_thunks ();
1079
8f69fd82 1080 /* Mark alias targets necessary and emit diagnostics. */
1081 finish_aliases_1 ();
1082
9929334e 1083 /* Gimplify and lower thunks. */
aeeb194b 1084 cgraph_analyze_functions ();
bfec3452 1085
9929334e 1086 /* Finally drive the pass manager. */
bfec3452 1087 cgraph_optimize ();
9929334e 1088
1089 timevar_pop (TV_CGRAPH);
ae01b312 1090}
9ed5b1f5 1091
1092
ae01b312 1093/* Figure out what functions we want to assemble. */
1094
1095static void
d9d9733a 1096cgraph_mark_functions_to_output (void)
ae01b312 1097{
1098 struct cgraph_node *node;
1099
ae01b312 1100 for (node = cgraph_nodes; node; node = node->next)
1101 {
1102 tree decl = node->decl;
d7c6d889 1103 struct cgraph_edge *e;
a0c938f0 1104
09fc9532 1105 gcc_assert (!node->process);
d7c6d889 1106
1107 for (e = node->callers; e; e = e->next_caller)
611e5405 1108 if (e->inline_failed)
d7c6d889 1109 break;
ae01b312 1110
e6d2b2d8 1111 /* We need to output all local functions that are used and not
1112 always inlined, as well as those that are reachable from
1113 outside the current compilation unit. */
1a1a827a 1114 if (node->analyzed
b0cdf642 1115 && !node->global.inlined_to
ae01b312 1116 && (node->needed
d7c6d889 1117 || (e && node->reachable))
4ee9c684 1118 && !TREE_ASM_WRITTEN (decl)
ae01b312 1119 && !DECL_EXTERNAL (decl))
09fc9532 1120 node->process = 1;
cc636d56 1121 else
9cee7c3f 1122 {
1123 /* We should've reclaimed all functions that are not needed. */
1124#ifdef ENABLE_CHECKING
75a70cf9 1125 if (!node->global.inlined_to
1a1a827a 1126 && gimple_has_body_p (decl)
9cee7c3f 1127 && !DECL_EXTERNAL (decl))
1128 {
1129 dump_cgraph_node (stderr, node);
1130 internal_error ("failed to reclaim unneeded function");
1131 }
1132#endif
75a70cf9 1133 gcc_assert (node->global.inlined_to
1a1a827a 1134 || !gimple_has_body_p (decl)
9cee7c3f 1135 || DECL_EXTERNAL (decl));
1136
1137 }
a0c938f0 1138
961e3b13 1139 }
1140}
1141
ae01b312 1142/* Expand function specified by NODE. */
e6d2b2d8 1143
ae01b312 1144static void
d9d9733a 1145cgraph_expand_function (struct cgraph_node *node)
ae01b312 1146{
1147 tree decl = node->decl;
1148
b0cdf642 1149 /* We ought not to compile any inline clones. */
cc636d56 1150 gcc_assert (!node->global.inlined_to);
b0cdf642 1151
6329636b 1152 announce_function (decl);
09fc9532 1153 node->process = 0;
961e3b13 1154
e7c352d1 1155 gcc_assert (node->lowered);
f8deefc1 1156
794da2bb 1157 /* Generate RTL for the body of DECL. */
84e10000 1158 tree_rest_of_compilation (decl);
961e3b13 1159
4ee9c684 1160 /* Make sure that the back end didn't give up on compiling. */
c04e3894 1161 gcc_assert (TREE_ASM_WRITTEN (decl));
ae01b312 1162 current_function_decl = NULL;
1a1a827a 1163 gcc_assert (!cgraph_preserve_function_body_p (decl));
1164 cgraph_release_function_body (node);
1165 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1166 points to the dead function body. */
1167 cgraph_node_remove_callees (node);
e1be32b8 1168
1169 cgraph_function_flags_ready = true;
ae01b312 1170}
1171
b0cdf642 1172/* Return true when the edge E has been inlined; store E's inline_failed code in *REASON. */
d7c6d889 1173
1174bool
326a9581 1175cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1176{
b0cdf642 1177 *reason = e->inline_failed;
1178 return !e->inline_failed;
d7c6d889 1179}
b0cdf642 1180
acc70efa 1181
acc70efa 1182
d9d9733a 1183/* Expand all functions that must be output.
1184
d7c6d889 1185 Attempt to topologically sort the nodes so a function is output when
1186 all called functions are already assembled, to allow data to be
91c82c20 1187 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1188 between a function and its callees (later we may choose to use a more
d7c6d889 1189 sophisticated algorithm for function reordering; we will likely want
1190 to use subsections to make the output functions appear in top-down
1191 order). */
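/* Illustrative only: if f calls g and both must be output, the sort
   below attempts to arrange that g is expanded before f, so whatever
   was learned while assembling g is available when f is expanded.  */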
1192
1193static void
a6868229 1194cgraph_expand_all_functions (void)
d7c6d889 1195{
1196 struct cgraph_node *node;
4c36ffe6 1197 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1198 int order_pos, new_order_pos = 0;
d7c6d889 1199 int i;
1200
d7c6d889 1201 order_pos = cgraph_postorder (order);
cc636d56 1202 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1203
7bd28bba 1204 /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1205 optimization, so we must be sure not to reference them. */
1206 for (i = 0; i < order_pos; i++)
09fc9532 1207 if (order[i]->process)
b0cdf642 1208 order[new_order_pos++] = order[i];
1209
1210 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1211 {
1212 node = order[i];
09fc9532 1213 if (node->process)
d7c6d889 1214 {
cc636d56 1215 gcc_assert (node->reachable);
09fc9532 1216 node->process = 0;
d7c6d889 1217 cgraph_expand_function (node);
1218 }
1219 }
523c1122 1220 cgraph_process_new_functions ();
773c5ba7 1221
d7c6d889 1222 free (order);
773c5ba7 1223
d7c6d889 1224}
1225
56af936e 1226/* This is used to sort the node types by the cgraph order number. */
1227
0b09525f 1228enum cgraph_order_sort_kind
1229{
1230 ORDER_UNDEFINED = 0,
1231 ORDER_FUNCTION,
1232 ORDER_VAR,
1233 ORDER_ASM
1234};
1235
56af936e 1236struct cgraph_order_sort
1237{
0b09525f 1238 enum cgraph_order_sort_kind kind;
56af936e 1239 union
1240 {
1241 struct cgraph_node *f;
1d416bd7 1242 struct varpool_node *v;
56af936e 1243 struct cgraph_asm_node *a;
1244 } u;
1245};
1246
1247/* Output all functions, variables, and asm statements in the order
1248 according to their order fields, which is the order in which they
1249 appeared in the file. This implements -fno-toplevel-reorder. In
1250 this mode we may output functions and variables which don't really
1251 need to be output. */
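/* For example (illustrative): with -fno-toplevel-reorder a unit containing

     asm ("...");
     static int counter;
     void bump (void) { counter++; }

   is emitted with the asm statement first, then the variable, then the
   function, matching their source order, even if "counter" could
   otherwise have been optimized away.  */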
1252
1253static void
1254cgraph_output_in_order (void)
1255{
1256 int max;
1257 size_t size;
1258 struct cgraph_order_sort *nodes;
1259 int i;
1260 struct cgraph_node *pf;
1d416bd7 1261 struct varpool_node *pv;
56af936e 1262 struct cgraph_asm_node *pa;
1263
1264 max = cgraph_order;
1265 size = max * sizeof (struct cgraph_order_sort);
1266 nodes = (struct cgraph_order_sort *) alloca (size);
1267 memset (nodes, 0, size);
1268
1d416bd7 1269 varpool_analyze_pending_decls ();
56af936e 1270
1271 for (pf = cgraph_nodes; pf; pf = pf->next)
1272 {
09fc9532 1273 if (pf->process)
56af936e 1274 {
1275 i = pf->order;
1276 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1277 nodes[i].kind = ORDER_FUNCTION;
1278 nodes[i].u.f = pf;
1279 }
1280 }
1281
1d416bd7 1282 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1283 {
1284 i = pv->order;
1285 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1286 nodes[i].kind = ORDER_VAR;
1287 nodes[i].u.v = pv;
1288 }
1289
1290 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1291 {
1292 i = pa->order;
1293 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1294 nodes[i].kind = ORDER_ASM;
1295 nodes[i].u.a = pa;
1296 }
56af936e 1297
304e5318 1298 /* In this mode we output all statics; mark them as needed. */
1299 for (i = 0; i < max; ++i)
1300 {
1301 if (nodes[i].kind == ORDER_VAR)
1302 {
1303 varpool_mark_needed_node (nodes[i].u.v);
1304 }
1305 }
1306 varpool_empty_needed_queue ();
1307
56af936e 1308 for (i = 0; i < max; ++i)
1309 {
1310 switch (nodes[i].kind)
1311 {
1312 case ORDER_FUNCTION:
09fc9532 1313 nodes[i].u.f->process = 0;
56af936e 1314 cgraph_expand_function (nodes[i].u.f);
1315 break;
1316
1317 case ORDER_VAR:
1d416bd7 1318 varpool_assemble_decl (nodes[i].u.v);
56af936e 1319 break;
1320
1321 case ORDER_ASM:
1322 assemble_asm (nodes[i].u.a->asm_str);
1323 break;
1324
1325 case ORDER_UNDEFINED:
1326 break;
1327
1328 default:
1329 gcc_unreachable ();
1330 }
1331 }
4b4ea2db 1332
1333 cgraph_asm_nodes = NULL;
56af936e 1334}
1335
b0cdf642 1336/* Return true when the function body of DECL still needs to be kept around
1337 for later re-use. */
1338bool
1339cgraph_preserve_function_body_p (tree decl)
1340{
1341 struct cgraph_node *node;
8d8c4c8d 1342
1343 gcc_assert (cgraph_global_info_ready);
b0cdf642 1344 /* Look if there is any clone around. */
ccf4ab6b 1345 node = cgraph_node (decl);
1346 if (node->clones)
1347 return true;
b0cdf642 1348 return false;
1349}
1350
77fce4cd 1351static void
1352ipa_passes (void)
1353{
87d4aa85 1354 set_cfun (NULL);
4b14adf9 1355 current_function_decl = NULL;
75a70cf9 1356 gimple_register_cfg_hooks ();
77fce4cd 1357 bitmap_obstack_initialize (NULL);
7bfefa9d 1358 execute_ipa_pass_list (all_small_ipa_passes);
9ed5b1f5 1359
7bfefa9d 1360 /* If pass_all_early_optimizations was not scheduled, the state of
1361 the cgraph will not be properly updated. Update it now. */
1362 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1363 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 1364
7bfefa9d 1365 if (!in_lto_p)
1366 {
1367 /* Generate coverage variables and constructors. */
1368 coverage_finish ();
1369
1370 /* Process new functions added. */
1371 set_cfun (NULL);
1372 current_function_decl = NULL;
1373 cgraph_process_new_functions ();
1374 }
1375
1376 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1377 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1378
1379 if (!in_lto_p)
1380 ipa_write_summaries ();
1381
1382 execute_ipa_pass_list (all_regular_ipa_passes);
9ed5b1f5 1383
77fce4cd 1384 bitmap_obstack_release (NULL);
1385}
1386
34e5cced 1387
ae01b312 1388/* Perform simple optimizations based on callgraph. */
1389
7bfefa9d 1390void
d9d9733a 1391cgraph_optimize (void)
ae01b312 1392{
cb2b5570 1393 if (errorcount || sorrycount)
1394 return;
1395
b0cdf642 1396#ifdef ENABLE_CHECKING
1397 verify_cgraph ();
1398#endif
a861fe52 1399
c1dcd13c 1400 /* The frontend may output common variables after the unit has been finalized.
1401 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 1402 varpool_analyze_pending_decls ();
e9f08e82 1403
f79b6507 1404 timevar_push (TV_CGRAPHOPT);
51949610 1405 if (pre_ipa_mem_report)
1406 {
1407 fprintf (stderr, "Memory consumption before IPA\n");
1408 dump_memory_report (false);
1409 }
d7c6d889 1410 if (!quiet_flag)
cd6bca02 1411 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1412 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1413
be4d0974 1414 /* Don't run the IPA passes if there was any error or sorry messages. */
1415 if (errorcount == 0 && sorrycount == 0)
1416 ipa_passes ();
1417
34e5cced 1418 /* Do nothing else if any IPA pass found errors. */
1419 if (errorcount || sorrycount)
1420 return;
1421
e1be32b8 1422 /* This pass removes the bodies of extern inline functions we never inlined.
1423 Do this later so other IPA passes see what is really going on. */
1424 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1425 cgraph_global_info_ready = true;
f79b6507 1426 if (cgraph_dump_file)
1427 {
e4200070 1428 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1429 dump_cgraph (cgraph_dump_file);
c1dcd13c 1430 dump_varpool (cgraph_dump_file);
f79b6507 1431 }
51949610 1432 if (post_ipa_mem_report)
1433 {
defa2fa6 1434 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1435 dump_memory_report (false);
1436 }
f79b6507 1437 timevar_pop (TV_CGRAPHOPT);
ae01b312 1438
d7c6d889 1439 /* Output everything. */
e4200070 1440 if (!quiet_flag)
1441 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1442#ifdef ENABLE_CHECKING
1443 verify_cgraph ();
1444#endif
56af936e 1445
ccf4ab6b 1446 cgraph_materialize_all_clones ();
acc70efa 1447 cgraph_mark_functions_to_output ();
c1dcd13c 1448
523c1122 1449 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1450 if (!flag_toplevel_reorder)
1451 cgraph_output_in_order ();
1452 else
1453 {
1454 cgraph_output_pending_asms ();
1455
1456 cgraph_expand_all_functions ();
1d416bd7 1457 varpool_remove_unreferenced_decls ();
56af936e 1458
1d416bd7 1459 varpool_assemble_pending_decls ();
56af936e 1460 }
523c1122 1461 cgraph_process_new_functions ();
1462 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1463
f79b6507 1464 if (cgraph_dump_file)
1465 {
e4200070 1466 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1467 dump_cgraph (cgraph_dump_file);
1468 }
b0cdf642 1469#ifdef ENABLE_CHECKING
1470 verify_cgraph ();
4ee9c684 1471 /* Double check that all inline clones are gone and that all
1472 function bodies have been released from memory. */
6329636b 1473 if (!(sorrycount || errorcount))
4ee9c684 1474 {
1475 struct cgraph_node *node;
1476 bool error_found = false;
1477
1478 for (node = cgraph_nodes; node; node = node->next)
1479 if (node->analyzed
1480 && (node->global.inlined_to
1a1a827a 1481 || gimple_has_body_p (node->decl)))
4ee9c684 1482 {
1483 error_found = true;
1484 dump_cgraph_node (stderr, node);
a0c938f0 1485 }
4ee9c684 1486 if (error_found)
c04e3894 1487 internal_error ("nodes with unreleased memory found");
4ee9c684 1488 }
b0cdf642 1489#endif
ae01b312 1490}
34e5cced 1491
1492
2c56f72e 1493/* Generate and emit a static constructor or destructor. WHICH must
1494 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1495 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
f0b5f617 1496 initialization priority for this constructor or destructor. */
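/* Illustrative call (hypothetical caller; assumes the usual
   DEFAULT_INIT_PRIORITY macro from tree.h): a pass that has collected
   GENERIC initialization statements into a STATEMENT_LIST BODY can emit
   them as a constructor with

     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   The priority is encoded into the generated function's name (see the
   sprintf below) so that collect2 can order the calls.  */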
b5530559 1497
1498void
c5344b58 1499cgraph_build_static_cdtor (char which, tree body, int priority)
b5530559 1500{
1501 static int counter = 0;
1502 char which_buf[16];
540edea7 1503 tree decl, name, resdecl;
b5530559 1504
2c56f72e 1505 /* The priority is encoded in the constructor or destructor name.
1506 collect2 will sort the names and arrange that they are called at
1507 program startup. */
1508 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
db85cc4f 1509 name = get_file_function_name (which_buf);
b5530559 1510
e60a6f7b 1511 decl = build_decl (input_location, FUNCTION_DECL, name,
b5530559 1512 build_function_type (void_type_node, void_list_node));
1513 current_function_decl = decl;
1514
e60a6f7b 1515 resdecl = build_decl (input_location,
1516 RESULT_DECL, NULL_TREE, void_type_node);
540edea7 1517 DECL_ARTIFICIAL (resdecl) = 1;
540edea7 1518 DECL_RESULT (decl) = resdecl;
8e5b4ed6 1519 DECL_CONTEXT (resdecl) = decl;
540edea7 1520
80f2ef47 1521 allocate_struct_function (decl, false);
b5530559 1522
1523 TREE_STATIC (decl) = 1;
1524 TREE_USED (decl) = 1;
1525 DECL_ARTIFICIAL (decl) = 1;
b5530559 1526 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1527 DECL_SAVED_TREE (decl) = body;
1528 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1529 DECL_UNINLINABLE (decl) = 1;
1530
1531 DECL_INITIAL (decl) = make_node (BLOCK);
1532 TREE_USED (DECL_INITIAL (decl)) = 1;
1533
1534 DECL_SOURCE_LOCATION (decl) = input_location;
1535 cfun->function_end_locus = input_location;
1536
cc636d56 1537 switch (which)
1538 {
1539 case 'I':
1540 DECL_STATIC_CONSTRUCTOR (decl) = 1;
64c2e9b0 1541 decl_init_priority_insert (decl, priority);
cc636d56 1542 break;
1543 case 'D':
1544 DECL_STATIC_DESTRUCTOR (decl) = 1;
64c2e9b0 1545 decl_fini_priority_insert (decl, priority);
cc636d56 1546 break;
1547 default:
1548 gcc_unreachable ();
1549 }
b5530559 1550
1551 gimplify_function_tree (decl);
1552
523c1122 1553 cgraph_add_new_function (decl, false);
1554 cgraph_mark_needed_node (cgraph_node (decl));
e3a37aef 1555 set_cfun (NULL);
b5530559 1556}
121f3051 1557
1558void
1559init_cgraph (void)
1560{
1561 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1562}
b5d36404 1563
a0c938f0 1564/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1565 fixed by cgraph_function_versioning (); now the call statements in their
1566 respective callers should be updated to call the NEW_VERSION. */
1567
1568static void
1569update_call_expr (struct cgraph_node *new_version)
1570{
1571 struct cgraph_edge *e;
1572
1573 gcc_assert (new_version);
75a70cf9 1574
1575 /* Update the call expr on the edges to call the new version. */
b5d36404 1576 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1577 {
1578 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1579 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1580 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1581 }
b5d36404 1582}
1583
1584
1585/* Create a new cgraph node which is the new version of
1586 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1587 edges which should be redirected to point to
1588 NEW_VERSION. ALL the callees edges of OLD_VERSION
1589 are cloned to the new version node. Return the new
1590 version node. */
1591
1592static struct cgraph_node *
1593cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1594 tree new_decl,
1595 VEC(cgraph_edge_p,heap) *redirect_callers)
b5d36404 1596 {
1597 struct cgraph_node *new_version;
1598 struct cgraph_edge *e, *new_e;
1599 struct cgraph_edge *next_callee;
1600 unsigned i;
1601
1602 gcc_assert (old_version);
a0c938f0 1603
b5d36404 1604 new_version = cgraph_node (new_decl);
1605
1606 new_version->analyzed = true;
1607 new_version->local = old_version->local;
1608 new_version->global = old_version->global;
1609 new_version->rtl = old_version->rtl;
1610 new_version->reachable = true;
1611 new_version->count = old_version->count;
1612
1613 /* Clone the old node callees. Recursive calls are
1614 also cloned. */
1615 for (e = old_version->callees;e; e=e->next_callee)
1616 {
7bfefa9d 1617 new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
1618 e->lto_stmt_uid, 0, e->frequency,
4ae20857 1619 e->loop_nest, true);
b5d36404 1620 new_e->count = e->count;
1621 }
1622 /* Fix recursive calls.
1623 If OLD_VERSION has a recursive call after the
1624 previous edge cloning, the new version will have an edge
1625 pointing to the old version, which is wrong;
1626 Redirect it to point to the new version. */
1627 for (e = new_version->callees ; e; e = next_callee)
1628 {
1629 next_callee = e->next_callee;
1630 if (e->callee == old_version)
1631 cgraph_redirect_edge_callee (e, new_version);
a0c938f0 1632
b5d36404 1633 if (!next_callee)
1634 break;
1635 }
4460a647 1636 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1637 {
1638 /* Redirect calls to the old version node to point to its new
1639 version. */
1640 cgraph_redirect_edge_callee (e, new_version);
1641 }
b5d36404 1642
1643 return new_version;
1644 }
1645
1646 /* Perform function versioning.
a0c938f0 1647 Function versioning includes copying of the tree and
b5d36404 1648 a callgraph update (creating a new cgraph node and updating
1649 its callees and callers).
1650
1651 REDIRECT_CALLERS varray includes the edges to be redirected
1652 to the new version.
1653
1654 TREE_MAP is a mapping of tree nodes we want to replace with
1655 new ones (according to results of prior analysis).
1656 OLD_VERSION_NODE is the node that is versioned.
5afe38fe 1657 It returns the new version's cgraph node.
1658 ARGS_TO_SKIP lists arguments to be omitted from the new version. */
b5d36404 1660
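/* Illustrative use (hypothetical caller): an IPA pass that wants a
   private copy of a function with some arguments dropped collects the
   caller edges to retarget into REDIRECT_CALLERS, sets the bits of the
   unwanted arguments in ARGS_TO_SKIP and calls

     new_node = cgraph_function_versioning (old_node, redirect_callers,
                                            tree_map, args_to_skip);

   The returned node is local to this unit and the call statements of
   the redirected callers are rewritten via update_call_expr.  */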
1661struct cgraph_node *
1662cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 1663 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 1664 VEC (ipa_replace_map_p,gc)* tree_map,
5afe38fe 1665 bitmap args_to_skip)
b5d36404 1666{
1667 tree old_decl = old_version_node->decl;
1668 struct cgraph_node *new_version_node = NULL;
1669 tree new_decl;
1670
1671 if (!tree_versionable_function_p (old_decl))
1672 return NULL;
1673
1674 /* Make a new FUNCTION_DECL tree node for the
1675 new version. */
5afe38fe 1676 if (!args_to_skip)
1677 new_decl = copy_node (old_decl);
1678 else
1679 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 1680
  1681   /* Create the new version's call-graph node
 1682      and update the edges of the new node.  */
1683 new_version_node =
1684 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1685 redirect_callers);
1686
1687 /* Copy the OLD_VERSION_NODE function tree to the new version. */
5afe38fe 1688 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
b5d36404 1689
a0c938f0 1690 /* Update the new version's properties.
e03a95e7 1691     Make the new version visible only within this translation unit.  Make
 1692     sure it is not weak either.
a0c938f0 1693 ??? We cannot use COMDAT linkage because there is no
b5d36404 1694 ABI support for this. */
1695 DECL_EXTERNAL (new_version_node->decl) = 0;
ecd88073 1696 DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
b5d36404 1697 TREE_PUBLIC (new_version_node->decl) = 0;
1698 DECL_COMDAT (new_version_node->decl) = 0;
e03a95e7 1699 DECL_WEAK (new_version_node->decl) = 0;
f014e39d 1700 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 1701 new_version_node->local.externally_visible = 0;
1702 new_version_node->local.local = 1;
1703 new_version_node->lowered = true;
f014e39d 1704
e03a95e7 1705 /* Update the call_expr on the edges to call the new version node. */
1706 update_call_expr (new_version_node);
1707
50828ed8 1708 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 1709 return new_version_node;
1710}
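
/* A minimal usage sketch (illustrative only; NODE, PARM and NEW_VAL are
   placeholders and error handling is omitted).  A pass that wants a private
   specialized copy of NODE in which PARM is replaced by NEW_VAL could collect
   all existing caller edges and call cgraph_function_versioning:

     VEC (cgraph_edge_p, heap) *redirect_callers = NULL;
     VEC (ipa_replace_map_p, gc) *tree_map = NULL;
     struct ipa_replace_map *map;
     struct cgraph_edge *e;
     struct cgraph_node *new_node;

     for (e = node->callers; e; e = e->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);

     map = (struct ipa_replace_map *) ggc_alloc (sizeof (*map));
     map->old_tree = parm;
     map->new_tree = new_val;
     map->replace_p = true;
     map->ref_p = false;
     VEC_safe_push (ipa_replace_map_p, gc, tree_map, map);

     new_node = cgraph_function_versioning (node, redirect_callers,
					    tree_map, NULL);
     VEC_free (cgraph_edge_p, heap, redirect_callers);

   Passing a non-NULL ARGS_TO_SKIP bitmap instead would additionally drop the
   corresponding formal parameters from the new version.  */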
469679ab 1711
 1712/* Produce a separate function body for inline clones so that the offline
 1713   copy can be modified without affecting them.  */
1714struct cgraph_node *
1715save_inline_function_body (struct cgraph_node *node)
1716{
ccf4ab6b 1717 struct cgraph_node *first_clone, *n;
469679ab 1718
1719 gcc_assert (node == cgraph_node (node->decl));
1720
1721 cgraph_lower_function (node);
1722
ccf4ab6b 1723 first_clone = node->clones;
469679ab 1724
1725 first_clone->decl = copy_node (node->decl);
469679ab 1726 cgraph_insert_node_to_hashtable (first_clone);
1727 gcc_assert (first_clone == cgraph_node (first_clone->decl));
ccf4ab6b 1728 if (first_clone->next_sibling_clone)
1729 {
1730 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
1731 n->clone_of = first_clone;
1732 n->clone_of = first_clone;
1733 n->next_sibling_clone = first_clone->clones;
1734 if (first_clone->clones)
1735 first_clone->clones->prev_sibling_clone = n;
1736 first_clone->clones = first_clone->next_sibling_clone;
1737 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
1738 first_clone->next_sibling_clone = NULL;
1739 gcc_assert (!first_clone->prev_sibling_clone);
1740 }
1741 first_clone->clone_of = NULL;
1742 node->clones = NULL;
1743
1744 if (first_clone->clones)
1745 for (n = first_clone->clones; n != first_clone;)
1746 {
1747 gcc_assert (n->decl == node->decl);
1748 n->decl = first_clone->decl;
1749 if (n->clones)
1750 n = n->clones;
1751 else if (n->next_sibling_clone)
1752 n = n->next_sibling_clone;
1753 else
1754 {
1755 while (n != first_clone && !n->next_sibling_clone)
1756 n = n->clone_of;
1757 if (n != first_clone)
1758 n = n->next_sibling_clone;
1759 }
1760 }
469679ab 1761
 1762  /* Copy the function body of NODE into the first clone's new decl.  */
5afe38fe 1763 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
469679ab 1764
1765 DECL_EXTERNAL (first_clone->decl) = 0;
ecd88073 1766 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
469679ab 1767 TREE_PUBLIC (first_clone->decl) = 0;
1768 DECL_COMDAT (first_clone->decl) = 0;
7fe9b425 1769 VEC_free (ipa_opt_pass, heap,
1770 DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply);
1771 DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply = NULL;
469679ab 1772
469679ab 1773#ifdef ENABLE_CHECKING
1774 verify_cgraph_node (first_clone);
1775#endif
1776 return first_clone;
1777}
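
/* For example, if NODE has inline clones C1, C2 and C3 (illustrative names,
   all sharing NODE's decl and body), then after save_inline_function_body C1
   owns a fresh decl holding a copy of the body, C2 and C3 are recorded as
   clones of C1 and use C1's decl, and NODE's own offline body can be modified
   freely without affecting the inline clones.  */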
a861fe52 1778
ccf4ab6b 1779/* Given a virtual clone, turn it into an actual clone.  */
1780static void
1781cgraph_materialize_clone (struct cgraph_node *node)
1782{
1783 bitmap_obstack_initialize (NULL);
 1784  /* Copy the function body of the clone's origin into the clone's decl.  */
1785 tree_function_versioning (node->clone_of->decl, node->decl,
1786 node->clone.tree_map, true,
1787 node->clone.args_to_skip);
e20422ea 1788 if (cgraph_dump_file)
1789 {
1790 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
1791 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
1792 }
ccf4ab6b 1793
 1794  /* The function is no longer a clone.  */
1795 if (node->next_sibling_clone)
1796 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1797 if (node->prev_sibling_clone)
1798 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1799 else
1800 node->clone_of->clones = node->next_sibling_clone;
1801 node->next_sibling_clone = NULL;
1802 node->prev_sibling_clone = NULL;
1803 node->clone_of = NULL;
1804 bitmap_obstack_release (NULL);
1805}
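
/* For instance, materializing a virtual clone of int f (int a, int b) whose
   clone.args_to_skip contains bit 0 (drop A) and whose clone.tree_map
   replaces A with the constant 5 produces a real body for a one-parameter
   function f.clone (int b) in which every use of A has been folded to 5.
   The name f.clone is purely illustrative.  */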
1806
 1807/* Once all functions from the compilation unit are in memory, produce all
 1808   clones and update all calls.
 1809   We might also do this on demand if we don't want to bring all functions
 1810   into memory prior to compilation, but the current WHOPR implementation does
 1811   that and it is a bit easier to keep everything right in this order.  */
1812void
1813cgraph_materialize_all_clones (void)
1814{
1815 struct cgraph_node *node;
1816 bool stabilized = false;
1817
1818 if (cgraph_dump_file)
1819 fprintf (cgraph_dump_file, "Materializing clones\n");
1820#ifdef ENABLE_CHECKING
1821 verify_cgraph ();
1822#endif
1823
 1824  /* We could also process the nodes in topological order, but the number of
 1825     iterations should be bounded by the number of IPA passes, since a single
 1826     IPA pass is unlikely to create clones of the clones it created itself.  */
1827 while (!stabilized)
1828 {
1829 stabilized = true;
1830 for (node = cgraph_nodes; node; node = node->next)
1831 {
1832 if (node->clone_of && node->decl != node->clone_of->decl
1833 && !gimple_has_body_p (node->decl))
1834 {
1835 if (gimple_has_body_p (node->clone_of->decl))
1836 {
1837 if (cgraph_dump_file)
e20422ea 1838 {
 1839                  fprintf (cgraph_dump_file, "cloning %s to %s\n",
1840 cgraph_node_name (node->clone_of),
1841 cgraph_node_name (node));
1842 if (node->clone.tree_map)
1843 {
1844 unsigned int i;
1845 fprintf (cgraph_dump_file, " replace map: ");
1846 for (i = 0; i < VEC_length (ipa_replace_map_p,
1847 node->clone.tree_map);
1848 i++)
1849 {
1850 struct ipa_replace_map *replace_info;
1851 replace_info = VEC_index (ipa_replace_map_p,
1852 node->clone.tree_map,
1853 i);
1854 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
1855 fprintf (cgraph_dump_file, " -> ");
1856 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
1857 fprintf (cgraph_dump_file, "%s%s;",
1858 replace_info->replace_p ? "(replace)":"",
1859 replace_info->ref_p ? "(ref)":"");
1860 }
1861 fprintf (cgraph_dump_file, "\n");
1862 }
1863 if (node->clone.args_to_skip)
1864 {
1865 fprintf (cgraph_dump_file, " args_to_skip: ");
1866 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
1867 }
 1868                      if (node->clone.combined_args_to_skip)
1869 {
1870 fprintf (cgraph_dump_file, " combined_args_to_skip:");
1871 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
1872 }
1873 }
ccf4ab6b 1874 cgraph_materialize_clone (node);
1875 }
1876 else
1877 stabilized = false;
1878 }
1879 }
1880 }
1881 if (cgraph_dump_file)
1882 fprintf (cgraph_dump_file, "Updating call sites\n");
1883 for (node = cgraph_nodes; node; node = node->next)
1884 if (node->analyzed && gimple_has_body_p (node->decl)
1885 && (!node->clone_of || node->clone_of->decl != node->decl))
1886 {
1887 struct cgraph_edge *e;
1888
1889 current_function_decl = node->decl;
1890 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1891 for (e = node->callees; e; e = e->next_callee)
1892 {
1893 tree decl = gimple_call_fndecl (e->call_stmt);
947781ac 1894          /* When a function gets inlined, indirect inlining might have invented
 1895             a new edge for an originally indirect stmt.  Since we are not
 1896             preserving clones in the original form, we must not update here,
 1897             as other inline clones need not refer to the same callee.  The
 1898             inliner will do the substitution for us later.  */
1899 if (decl && decl != e->callee->decl)
ccf4ab6b 1900 {
1901 gimple new_stmt;
1902 gimple_stmt_iterator gsi;
1903
1904 if (cgraph_dump_file)
1905 {
 1906                  fprintf (cgraph_dump_file, "updating call of %s in %s:",
 1907                           cgraph_node_name (e->callee),
 1908                           cgraph_node_name (node));
1909 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1910 }
1911
e20422ea 1912 if (e->callee->clone.combined_args_to_skip)
ccf4ab6b 1913 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
e20422ea 1914 e->callee->clone.combined_args_to_skip);
ccf4ab6b 1915 else
1916 new_stmt = e->call_stmt;
1917 if (gimple_vdef (new_stmt)
1918 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1919 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1920 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1921
1922 gsi = gsi_for_stmt (e->call_stmt);
1923 gsi_replace (&gsi, new_stmt, true);
1924
1925 /* Update EH information too, just in case. */
e38def9c 1926 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
ccf4ab6b 1927
1928 cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);
1929
1930 if (cgraph_dump_file)
1931 {
1932 fprintf (cgraph_dump_file, " updated to:");
1933 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1934 }
1935 }
1936 }
1937 pop_cfun ();
1938 current_function_decl = NULL;
1939#ifdef ENABLE_CHECKING
1940 verify_cgraph_node (node);
1941#endif
1942 }
947781ac 1943#ifdef ENABLE_CHECKING
1944 verify_cgraph ();
1945#endif
ccf4ab6b 1946 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
1947}
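
/* To illustrate the call-site update above: if a call statement in the body
   of some caller originally reads

     f (x_1, y_2);

   but its edge now points at a materialized clone of F whose
   combined_args_to_skip drops the first argument, the statement is rewritten
   via gimple_call_copy_skip_args and gimple_call_set_fndecl to

     f.clone.0 (y_2);

   where f.clone.0 stands for whatever name the clone's decl carries.  */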
1948
a861fe52 1949#include "gt-cgraphunit.h"