cd6bca02 1/* Callgraph based interprocedural optimizations.
aed6e608 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well
cd6bca02 23 as a few basic interprocedural optimizers.
b0cdf642 24
 25 The main scope of this file is to act as an interface between the
 26 tree-based front ends and the back end (and middle end).
 27
 28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front end has parsed the whole body of the
 33 function and it is certain that neither the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
 36 functions.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
6329636b 48 Here the call-graph construction and local function
b0cdf642 49 analysis take place. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
 53 compilation units are combined (such as in the C front end).
b0cdf642 54
55 - cgraph_optimize
56
 57 In unit-at-a-time compilation the intraprocedural analysis takes
 58 place here. In particular, static functions whose address is never
 59 taken are marked as local. The back end can then use this information to
 60 modify calling conventions, do better inlining, or perform similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
 66 data structure must be updated accordingly by this function.
 67 There should be little need to call this function, and all the references
 68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ front end to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
 75 callgraph and varpool nodes referenced by the expression as needed.
76
 77 ??? On tree-ssa, genericizing should take place here and we would avoid
 78 the need for these hooks (replacing them by a genericizing hook).
79
6329636b 80 Analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
 86 reachable. References to functions and variables are discovered too,
 87 and variables found to be needed are output to the assembly file. Via
 88 the mark_referenced call in assemble_variable, functions referenced by
 89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
 93 to change a function from !reachable to reachable and thus
 94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined or are not external are output.
98
 99 ??? It is possible that a reference to a function or variable is optimized
 100 out. We cannot deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
 105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
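
/* Editor's illustration (not part of the original sources): a rough sketch
   of how a tree-based front end uses the entry points described above.
   more_input_p and parse_next_function are hypothetical front-end helpers.

     while (more_input_p ())
       {
         tree fndecl = parse_next_function ();
         cgraph_finalize_function (fndecl, false);
       }
     // cgraph_finalize_compilation_unit builds the callgraph, analyzes the
     // reachable functions and finally drives cgraph_optimize (see below).
     cgraph_finalize_compilation_unit ();
*/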
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
25bb88de 146
ecb08119 147FILE *cgraph_dump_file;
121f3051 148
28454517 149/* Used for vtable lookup in thunk adjusting. */
150static GTY (()) tree vtable_entry_type;
151
2c0b522d 152/* Determine if function DECL is needed. That is, visible to something
 153 either outside this translation unit or something magic in the system
6329636b 154 configury. */
2c0b522d 155
7bfefa9d 156bool
157cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 158{
3f82b628 159 /* If the user told us it is used, then it must be so. */
05806473 160 if (node->local.externally_visible)
161 return true;
162
3f82b628 163 /* ??? If the assembler name is set by hand, it is possible to assemble
164 the name later after finalizing the function and the fact is noticed
165 in assemble_name then. This is arguably a bug. */
166 if (DECL_ASSEMBLER_NAME_SET_P (decl)
167 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
168 return true;
169
55680bef 170 /* With -fkeep-inline-functions we are keeping all inline functions except
171 for extern inline ones. */
172 if (flag_keep_inline_functions
173 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 174 && !DECL_EXTERNAL (decl)
175 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 176 return true;
177
2c0b522d 178 /* If we decided it was needed before, but at the time we didn't have
179 the body of the function available, then it's still needed. We have
180 to go back and re-check its dependencies now. */
181 if (node->needed)
182 return true;
183
184 /* Externally visible functions must be output. The exception is
a0c938f0 185 COMDAT functions that must be output only when they are needed.
8baa9d15 186
 187 When not optimizing, also output the static functions (see
95da6220 188 PR24561), but don't do so for always_inline functions, functions
0f9238c0 189 declared inline, and nested functions. These were optimized out
d3d410e1 190 in the original implementation and it is unclear whether we want
554f2707 191 to change the behavior here. */
bba7ddf8 192 if (((TREE_PUBLIC (decl)
0f9238c0 193 || (!optimize
194 && !node->local.disregard_inline_limits
d3d410e1 195 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 196 && !(DECL_CONTEXT (decl)
197 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 198 && !flag_whole_program
cbcf2791 199 && !flag_lto)
62eec3b4 200 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 201 return true;
202
2c0b522d 203 return false;
204}
205
bdc40eb8 206/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
523c1122 207 functions into the callgraph so that they look like ordinary reachable
 208 functions inserted into the callgraph already at construction time. */
209
210bool
211cgraph_process_new_functions (void)
212{
213 bool output = false;
214 tree fndecl;
215 struct cgraph_node *node;
216
0cddb138 217 varpool_analyze_pending_decls ();
523c1122 218 /* Note that this queue may grow as it is being processed, as the new
219 functions may generate new ones. */
220 while (cgraph_new_nodes)
221 {
222 node = cgraph_new_nodes;
223 fndecl = node->decl;
224 cgraph_new_nodes = cgraph_new_nodes->next_needed;
225 switch (cgraph_state)
226 {
227 case CGRAPH_STATE_CONSTRUCTION:
228 /* At construction time we just need to finalize function and move
229 it into reachable functions list. */
230
231 node->next_needed = NULL;
232 cgraph_finalize_function (fndecl, false);
233 cgraph_mark_reachable_node (node);
234 output = true;
235 break;
236
237 case CGRAPH_STATE_IPA:
f517b36e 238 case CGRAPH_STATE_IPA_SSA:
523c1122 239 /* When IPA optimization has already started, do all essential
 240 transformations that have already been performed on the whole
241 cgraph but not on this function. */
242
75a70cf9 243 gimple_register_cfg_hooks ();
523c1122 244 if (!node->analyzed)
245 cgraph_analyze_function (node);
246 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
247 current_function_decl = fndecl;
f517b36e 248 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
249 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
250 /* When not optimizing, be sure we run early local passes anyway
251 to expand OMP. */
252 || !optimize)
20099e35 253 execute_pass_list (pass_early_local_passes.pass.sub);
649597af 254 else
255 compute_inline_parameters (node);
523c1122 256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
258 pop_cfun ();
259 current_function_decl = NULL;
260 break;
261
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
264 directly. */
09fc9532 265 node->process = 0;
523c1122 266 cgraph_expand_function (node);
267 break;
268
269 default:
270 gcc_unreachable ();
271 break;
272 }
50828ed8 273 cgraph_call_function_insertion_hooks (node);
0cddb138 274 varpool_analyze_pending_decls ();
523c1122 275 }
276 return output;
277}
278
9b8fb23a 279/* As a GCC extension we allow redefinition of the function. The
 280 semantics when the two bodies differ are not well defined.
 281 We replace the old body with the new body, so in unit-at-a-time mode
 282 we always use the new body, while in normal mode we may end up with the
 283 old body inlined into some functions and the new body expanded and
 284 inlined in others.
 285
 286 ??? It may make more sense to use one body for inlining and the other
 287 body for expanding the function, but this is difficult to do. */
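
/* Editor's illustration (not part of the original sources): the kind of
   input that reaches cgraph_reset_node. With the GNU extern inline
   extension a translation unit may provide a second, out-of-line body:

     extern inline int twice (int x) { return 2 * x; }
     int twice (int x) { return x + x; }

   The second definition replaces the first one here. */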
288
289static void
290cgraph_reset_node (struct cgraph_node *node)
291{
09fc9532 292 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
09fc9532 297 gcc_assert (!node->process);
9b8fb23a 298
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
306
9b8fb23a 307 cgraph_node_remove_callees (node);
308
309 /* We may need to re-queue the node for assembling in case
46beef9a 310 we already processed it and ignored it as not needed, or got
311 a re-declaration in IMA mode. */
312 if (node->reachable)
9b8fb23a 313 {
314 struct cgraph_node *n;
315
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
317 if (n == node)
318 break;
319 if (!n)
320 node->reachable = 0;
321 }
322}
c08871a9 323
1e8e9920 324static void
325cgraph_lower_function (struct cgraph_node *node)
326{
327 if (node->lowered)
328 return;
bfec3452 329
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
333
1e8e9920 334 tree_lowering_passes (node->decl);
335 node->lowered = true;
336}
337
28df663b 338/* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
ae01b312 342
343void
28df663b 344cgraph_finalize_function (tree decl, bool nested)
ae01b312 345{
346 struct cgraph_node *node = cgraph_node (decl);
347
c08871a9 348 if (node->local.finalized)
9b8fb23a 349 cgraph_reset_node (node);
28df663b 350
167b550b 351 node->pid = cgraph_max_pid ++;
c08871a9 352 notice_global_symbol (decl);
79bb87b4 353 node->local.finalized = true;
e27482aa 354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 355 node->finalized_by_frontend = true;
ae01b312 356
7bfefa9d 357 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 358 cgraph_mark_needed_node (node);
359
ecda6e51 360 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 361 level unit, we need to be conservative about possible entry points
362 there. */
1e3aebec 363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
d050bafd 365 || DECL_STATIC_DESTRUCTOR (decl)
 366 /* COMDAT virtual functions may be referenced by vtables from
0a10fd82 367 other compilation units. Still we want to devirtualize calls
d050bafd 368 to those, so we need to analyze them.
 369 FIXME: We should introduce "may" edges for this purpose and update
 370 their handling in unreachable function removal and in the inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
3f82b628 372 cgraph_mark_reachable_node (node);
373
2c0b522d 374 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 375 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 376 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 377
b69eb0ff 378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
6329636b 381
382 if (!nested)
383 ggc_collect ();
ae01b312 384}
385
0da03d11 386/* C99 extern inline keywords allow changing the declaration after the function
 387 has been finalized. We then need to re-decide whether we want to mark the
 388 function as needed. */
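
/* Editor's illustration (not part of the original sources): C99 inline
   semantics that make this re-check necessary.

     inline int f (void) { return 0; }   // inline definition only
     extern inline int f (void);         // later declaration: an external
                                         // definition of f is now required

   After the second declaration the already-finalized function has to be
   reconsidered and, if needed, marked so that its body is emitted. */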
389
390void
391cgraph_mark_if_needed (tree decl)
392{
393 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 395 cgraph_mark_needed_node (node);
396}
397
ccf4ab6b 398/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
399static bool
400clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
401{
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
405}
406
1a036a3b 407/* Verify edge E count and frequency. */
408
409static bool
410verify_edge_count_and_frequency (struct cgraph_edge *e)
411{
412 bool error_found = false;
413 if (e->count < 0)
414 {
415 error ("caller edge count is negative");
416 error_found = true;
417 }
418 if (e->frequency < 0)
419 {
420 error ("caller edge frequency is negative");
421 error_found = true;
422 }
423 if (e->frequency > CGRAPH_FREQ_MAX)
424 {
425 error ("caller edge frequency is too large");
426 error_found = true;
427 }
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
0a10fd82 434 error ("caller edge frequency %i does not match BB frequency %i",
1a036a3b 435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441}
442
7b29dd2f 443/* Switch to THIS_CFUN if needed and print STMT to stderr. */
444static void
445cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
446{
447 /* debug_gimple_stmt needs correct cfun */
448 if (cfun != this_cfun)
449 set_cfun (this_cfun);
450 debug_gimple_stmt (stmt);
451}
452
b0cdf642 453/* Verify consistency of the given cgraph node. */
4b987fac 454DEBUG_FUNCTION void
b0cdf642 455verify_cgraph_node (struct cgraph_node *node)
456{
457 struct cgraph_edge *e;
e27482aa 458 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
459 basic_block this_block;
75a70cf9 460 gimple_stmt_iterator gsi;
9bfec7c2 461 bool error_found = false;
b0cdf642 462
852f689e 463 if (seen_error ())
bd09cd3e 464 return;
465
b0cdf642 466 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 467 for (e = node->callees; e; e = e->next_callee)
468 if (e->aux)
469 {
0a81f5a0 470 error ("aux field set for edge %s->%s",
abd3e6b5 471 identifier_to_locale (cgraph_node_name (e->caller)),
472 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 473 error_found = true;
474 }
a2cb9b3b 475 if (node->count < 0)
476 {
bf776685 477 error ("execution count is negative");
a2cb9b3b 478 error_found = true;
479 }
59dd4830 480 if (node->global.inlined_to && node->local.externally_visible)
481 {
bf776685 482 error ("externally visible inline clone");
59dd4830 483 error_found = true;
484 }
485 if (node->global.inlined_to && node->address_taken)
486 {
bf776685 487 error ("inline clone with address taken");
59dd4830 488 error_found = true;
489 }
490 if (node->global.inlined_to && node->needed)
491 {
bf776685 492 error ("inline clone is needed");
59dd4830 493 error_found = true;
494 }
799c8711 495 for (e = node->indirect_calls; e; e = e->next_callee)
496 {
497 if (e->aux)
498 {
499 error ("aux field set for indirect edge from %s",
500 identifier_to_locale (cgraph_node_name (e->caller)));
501 error_found = true;
502 }
503 if (!e->indirect_unknown_callee
504 || !e->indirect_info)
505 {
506 error ("An indirect edge from %s is not marked as indirect or has "
507 "associated indirect_info, the corresponding statement is: ",
508 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 509 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 510 error_found = true;
511 }
512 }
b0cdf642 513 for (e = node->callers; e; e = e->next_caller)
514 {
1a036a3b 515 if (verify_edge_count_and_frequency (e))
516 error_found = true;
b0cdf642 517 if (!e->inline_failed)
518 {
519 if (node->global.inlined_to
520 != (e->caller->global.inlined_to
521 ? e->caller->global.inlined_to : e->caller))
522 {
0a81f5a0 523 error ("inlined_to pointer is wrong");
b0cdf642 524 error_found = true;
525 }
526 if (node->callers->next_caller)
527 {
0a81f5a0 528 error ("multiple inline callers");
b0cdf642 529 error_found = true;
530 }
531 }
532 else
533 if (node->global.inlined_to)
534 {
0a81f5a0 535 error ("inlined_to pointer set for noninline callers");
b0cdf642 536 error_found = true;
537 }
538 }
1a036a3b 539 for (e = node->indirect_calls; e; e = e->next_callee)
540 if (verify_edge_count_and_frequency (e))
541 error_found = true;
b0cdf642 542 if (!node->callers && node->global.inlined_to)
543 {
5cd75817 544 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 545 error_found = true;
546 }
547 if (node->global.inlined_to == node)
548 {
0a81f5a0 549 error ("inlined_to pointer refers to itself");
b0cdf642 550 error_found = true;
551 }
552
7019fd3f 553 if (!cgraph_get_node (node->decl))
b0cdf642 554 {
0f6439b9 555 error ("node not found in cgraph_hash");
b0cdf642 556 error_found = true;
557 }
a0c938f0 558
ccf4ab6b 559 if (node->clone_of)
560 {
561 struct cgraph_node *n;
562 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
563 if (n == node)
564 break;
565 if (!n)
566 {
567 error ("node has wrong clone_of");
568 error_found = true;
569 }
570 }
571 if (node->clones)
572 {
573 struct cgraph_node *n;
574 for (n = node->clones; n; n = n->next_sibling_clone)
575 if (n->clone_of != node)
576 break;
577 if (n)
578 {
579 error ("node has wrong clone list");
580 error_found = true;
581 }
582 }
583 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
584 {
585 error ("node is in clone list but it is not clone");
586 error_found = true;
587 }
588 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
589 {
590 error ("node has wrong prev_clone pointer");
591 error_found = true;
592 }
593 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
594 {
595 error ("double linked list of clones corrupted");
596 error_found = true;
597 }
c524ac5d 598 if (node->same_comdat_group)
599 {
600 struct cgraph_node *n = node->same_comdat_group;
601
602 if (!DECL_ONE_ONLY (node->decl))
603 {
604 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
605 error_found = true;
606 }
607 if (n == node)
608 {
609 error ("node is alone in a comdat group");
610 error_found = true;
611 }
612 do
613 {
614 if (!n->same_comdat_group)
615 {
616 error ("same_comdat_group is not a circular list");
617 error_found = true;
618 break;
619 }
620 n = n->same_comdat_group;
621 }
622 while (n != node);
623 }
ccf4ab6b 624
625 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 626 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 627 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
628 && !flag_wpa)
b0cdf642 629 {
e27482aa 630 if (this_cfun->cfg)
631 {
632 /* The nodes we're interested in are never shared, so walk
633 the tree ignoring duplicates. */
e7c352d1 634 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 635 /* Reach the trees by walking over the CFG, and note the
636 enclosing basic-blocks in the call edges. */
637 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 638 for (gsi = gsi_start_bb (this_block);
639 !gsi_end_p (gsi);
640 gsi_next (&gsi))
9bfec7c2 641 {
75a70cf9 642 gimple stmt = gsi_stmt (gsi);
799c8711 643 if (is_gimple_call (stmt))
9bfec7c2 644 {
645 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 646 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 647 if (e)
648 {
649 if (e->aux)
650 {
0a81f5a0 651 error ("shared call_stmt:");
7b29dd2f 652 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 653 error_found = true;
654 }
799c8711 655 if (!e->indirect_unknown_callee)
28454517 656 {
76f3f3ab 657 struct cgraph_node *n;
658
799c8711 659 if (e->callee->same_body_alias)
660 {
661 error ("edge points to same body alias:");
662 debug_tree (e->callee->decl);
663 error_found = true;
664 }
e748b31d 665 else if (!e->callee->global.inlined_to
799c8711 666 && decl
e748b31d 667 && cgraph_get_node (decl)
668 && (e->callee->former_clone_of
669 != cgraph_get_node (decl)->decl)
799c8711 670 && !clone_of_p (cgraph_node (decl),
671 e->callee))
672 {
673 error ("edge points to wrong declaration:");
674 debug_tree (e->callee->decl);
675 fprintf (stderr," Instead of:");
676 debug_tree (decl);
677 error_found = true;
678 }
76f3f3ab 679 else if (decl
680 && (n = cgraph_get_node_or_alias (decl))
681 && (n->same_body_alias
682 && n->thunk.thunk_p))
683 {
684 error ("a call to thunk improperly represented "
685 "in the call graph:");
7b29dd2f 686 cgraph_debug_gimple_stmt (this_cfun, stmt);
687 error_found = true;
76f3f3ab 688 }
28454517 689 }
799c8711 690 else if (decl)
9bfec7c2 691 {
799c8711 692 error ("an indirect edge with unknown callee "
693 "corresponding to a call_stmt with "
694 "a known declaration:");
ee3f5fc0 695 error_found = true;
7b29dd2f 696 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
9bfec7c2 697 }
698 e->aux = (void *)1;
699 }
799c8711 700 else if (decl)
9bfec7c2 701 {
0a81f5a0 702 error ("missing callgraph edge for call stmt:");
7b29dd2f 703 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 704 error_found = true;
705 }
706 }
707 }
e27482aa 708 pointer_set_destroy (visited_nodes);
e27482aa 709 }
710 else
711 /* No CFG available?! */
712 gcc_unreachable ();
713
b0cdf642 714 for (e = node->callees; e; e = e->next_callee)
715 {
799c8711 716 if (!e->aux)
b0cdf642 717 {
0a81f5a0 718 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 719 identifier_to_locale (cgraph_node_name (e->caller)),
720 identifier_to_locale (cgraph_node_name (e->callee)));
7b29dd2f 721 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
b0cdf642 722 error_found = true;
723 }
724 e->aux = 0;
725 }
799c8711 726 for (e = node->indirect_calls; e; e = e->next_callee)
727 {
728 if (!e->aux)
729 {
730 error ("an indirect edge from %s has no corresponding call_stmt",
731 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 732 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 733 error_found = true;
734 }
735 e->aux = 0;
736 }
b0cdf642 737 }
738 if (error_found)
739 {
740 dump_cgraph_node (stderr, node);
0a81f5a0 741 internal_error ("verify_cgraph_node failed");
b0cdf642 742 }
743 timevar_pop (TV_CGRAPH_VERIFY);
744}
745
746/* Verify whole cgraph structure. */
4b987fac 747DEBUG_FUNCTION void
b0cdf642 748verify_cgraph (void)
749{
750 struct cgraph_node *node;
751
852f689e 752 if (seen_error ())
8ec2a798 753 return;
754
b0cdf642 755 for (node = cgraph_nodes; node; node = node->next)
756 verify_cgraph_node (node);
757}
758
56af936e 759/* Output all asm statements we have stored up to be output. */
760
761static void
762cgraph_output_pending_asms (void)
763{
764 struct cgraph_asm_node *can;
765
852f689e 766 if (seen_error ())
56af936e 767 return;
768
769 for (can = cgraph_asm_nodes; can; can = can->next)
770 assemble_asm (can->asm_str);
771 cgraph_asm_nodes = NULL;
772}
773
0785e435 774/* Analyze the function scheduled to be output. */
222bc9b9 775void
0785e435 776cgraph_analyze_function (struct cgraph_node *node)
777{
bfec3452 778 tree save = current_function_decl;
0785e435 779 tree decl = node->decl;
780
ec1e35b2 781 current_function_decl = decl;
e27482aa 782 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 783
6816d0c4 784 assign_assembler_name_if_neeeded (node->decl);
785
649597af 786 /* disregard_inline_limits affects the topological order of the early optimization,
 787 so we need to compute it ahead of the rest of the inline parameters. */
788 node->local.disregard_inline_limits
789 = DECL_DISREGARD_INLINE_LIMITS (node->decl);
790
bfec3452 791 /* Make sure to gimplify bodies only once. While analyzing a
 792 function we lower it, which will require gimplified nested
793 functions, so we can end up here with an already gimplified
794 body. */
795 if (!gimple_body (decl))
796 gimplify_function_tree (decl);
797 dump_function (TDI_generic, decl);
798
e27482aa 799 cgraph_lower_function (node);
6e8d6e86 800 node->analyzed = true;
0785e435 801
e27482aa 802 pop_cfun ();
bfec3452 803 current_function_decl = save;
0785e435 804}
805
d05db70d 806/* Process attributes common to variables and functions. */
807
808static void
809process_common_attributes (tree decl)
810{
811 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
812
813 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
814 {
815 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
816 "%<weakref%> attribute should be accompanied with"
817 " an %<alias%> attribute");
818 DECL_WEAK (decl) = 0;
40b32d93 819 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
820 DECL_ATTRIBUTES (decl));
d05db70d 821 }
822}
823
05806473 824/* Look for externally_visible and used attributes and mark cgraph nodes
825 accordingly.
826
827 We cannot mark the nodes at the point the attributes are processed (in
828 handle_*_attribute) because the copy of the declarations available at that
829 point may not be canonical. For example, in:
830
831 void f();
832 void f() __attribute__((used));
833
834 the declaration we see in handle_used_attribute will be the second
835 declaration -- but the front end will subsequently merge that declaration
836 with the original declaration and discard the second declaration.
837
838 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
839
840 void f() {}
841 void f() __attribute__((externally_visible));
842
843 is valid.
844
845 So, we walk the nodes at the end of the translation unit, applying the
846 attributes at that point. */
847
848static void
849process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 850 struct varpool_node *first_var)
05806473 851{
852 struct cgraph_node *node;
1d416bd7 853 struct varpool_node *vnode;
05806473 854
855 for (node = cgraph_nodes; node != first; node = node->next)
856 {
857 tree decl = node->decl;
83a23b05 858 if (DECL_PRESERVE_P (decl))
0b49f8f8 859 cgraph_mark_needed_node (node);
62433d51 860 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
861 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
862 && TREE_PUBLIC (node->decl))
863 {
864 if (node->local.finalized)
865 cgraph_mark_needed_node (node);
866 }
867 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 868 {
ba12ea31 869 if (! TREE_PUBLIC (node->decl))
712d2297 870 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
871 "%<externally_visible%>"
872 " attribute have effect only on public objects");
59dd4830 873 else if (node->local.finalized)
874 cgraph_mark_needed_node (node);
05806473 875 }
40b32d93 876 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
877 && node->local.finalized)
878 {
879 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
880 "%<weakref%> attribute ignored"
881 " because function is defined");
882 DECL_WEAK (decl) = 0;
883 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
884 DECL_ATTRIBUTES (decl));
885 }
d05db70d 886 process_common_attributes (decl);
05806473 887 }
1d416bd7 888 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 889 {
890 tree decl = vnode->decl;
83a23b05 891 if (DECL_PRESERVE_P (decl))
05806473 892 {
22671757 893 vnode->force_output = true;
05806473 894 if (vnode->finalized)
1d416bd7 895 varpool_mark_needed_node (vnode);
05806473 896 }
62433d51 897 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
898 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 899 && TREE_PUBLIC (vnode->decl))
62433d51 900 {
901 if (vnode->finalized)
902 varpool_mark_needed_node (vnode);
903 }
904 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 905 {
ba12ea31 906 if (! TREE_PUBLIC (vnode->decl))
712d2297 907 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
908 "%<externally_visible%>"
909 " attribute have effect only on public objects");
59dd4830 910 else if (vnode->finalized)
911 varpool_mark_needed_node (vnode);
05806473 912 }
40b32d93 913 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
914 && vnode->finalized
915 && DECL_INITIAL (decl))
916 {
917 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
918 "%<weakref%> attribute ignored"
919 " because variable is initialized");
920 DECL_WEAK (decl) = 0;
921 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
922 DECL_ATTRIBUTES (decl));
923 }
d05db70d 924 process_common_attributes (decl);
05806473 925 }
926}
927
aeeb194b 928/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
 929 each reachable function) and build the cgraph.
 930 The function can be called multiple times after inserting new nodes
0d424440 931 into the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 932
aeeb194b 933static void
934cgraph_analyze_functions (void)
ae01b312 935{
c1dcd13c 936 /* Keep track of already processed nodes when called multiple times for
06b27565 937 intermodule optimization. */
c1dcd13c 938 static struct cgraph_node *first_analyzed;
c17d0de1 939 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 940 static struct varpool_node *first_analyzed_var;
aeeb194b 941 struct cgraph_node *node, *next;
ae01b312 942
f1c35659 943 bitmap_obstack_initialize (NULL);
c17d0de1 944 process_function_and_variable_attributes (first_processed,
945 first_analyzed_var);
946 first_processed = cgraph_nodes;
1d416bd7 947 first_analyzed_var = varpool_nodes;
948 varpool_analyze_pending_decls ();
f79b6507 949 if (cgraph_dump_file)
ae01b312 950 {
e4200070 951 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 952 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 953 if (node->needed)
f79b6507 954 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
955 fprintf (cgraph_dump_file, "\n");
ae01b312 956 }
aeeb194b 957 cgraph_process_new_functions ();
ae01b312 958
e6d2b2d8 959 /* Propagate reachability flag and lower representation of all reachable
960 functions. In the future, lowering will introduce new functions and
961 new entry points on the way (by template instantiation and virtual
962 method table generation for instance). */
3d7bfc56 963 while (cgraph_nodes_queue)
ae01b312 964 {
0785e435 965 struct cgraph_edge *edge;
3d7bfc56 966 tree decl = cgraph_nodes_queue->decl;
967
968 node = cgraph_nodes_queue;
d87976fb 969 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 970 node->next_needed = NULL;
ae01b312 971
638531ad 972 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 973 weak alias attribute to kill its body. See
638531ad 974 gcc.c-torture/compile/20011119-1.c */
75a70cf9 975 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 976 {
977 cgraph_reset_node (node);
978 continue;
979 }
638531ad 980
7bfefa9d 981 if (!node->analyzed)
982 cgraph_analyze_function (node);
2c0b522d 983
ae01b312 984 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 985 if (!edge->callee->reachable)
2c0b522d 986 cgraph_mark_reachable_node (edge->callee);
987
61c2c7b1 988 if (node->same_comdat_group)
989 {
990 for (next = node->same_comdat_group;
991 next != node;
992 next = next->same_comdat_group)
993 cgraph_mark_reachable_node (next);
994 }
995
d544ceff 996 /* If decl is a clone of an abstract function, mark that abstract
997 function so that we don't release its body. The DECL_INITIAL() of that
 998 abstract function declaration will later be needed to output debug info. */
999 if (DECL_ABSTRACT_ORIGIN (decl))
1000 {
1001 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1002 origin_node->abstract_and_needed = true;
1003 }
1004
c17d0de1 1005 /* We finalize local static variables while constructing callgraph
1006 edges. Process their attributes too. */
1007 process_function_and_variable_attributes (first_processed,
1008 first_analyzed_var);
1009 first_processed = cgraph_nodes;
1d416bd7 1010 first_analyzed_var = varpool_nodes;
1011 varpool_analyze_pending_decls ();
aeeb194b 1012 cgraph_process_new_functions ();
ae01b312 1013 }
2c0b522d 1014
aa5e06c7 1015 /* Collect entry points to the unit. */
f79b6507 1016 if (cgraph_dump_file)
3d7bfc56 1017 {
e4200070 1018 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1019 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1020 if (node->needed)
f79b6507 1021 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1022 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1023 dump_cgraph (cgraph_dump_file);
7410370b 1024 dump_varpool (cgraph_dump_file);
3d7bfc56 1025 }
e6d2b2d8 1026
f79b6507 1027 if (cgraph_dump_file)
1028 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1029
f4ec5ce1 1030 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1031 {
1032 tree decl = node->decl;
f4ec5ce1 1033 next = node->next;
ae01b312 1034
1a1a827a 1035 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 1036 cgraph_reset_node (node);
9b8fb23a 1037
1a1a827a 1038 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 1039 {
f79b6507 1040 if (cgraph_dump_file)
1041 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1042 cgraph_remove_node (node);
9b8fb23a 1043 continue;
ae01b312 1044 }
bc5cab3b 1045 else
1046 node->next_needed = NULL;
1a1a827a 1047 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 1048 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1049 }
f79b6507 1050 if (cgraph_dump_file)
e4200070 1051 {
1052 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1053 dump_cgraph (cgraph_dump_file);
7410370b 1054 dump_varpool (cgraph_dump_file);
e4200070 1055 }
f1c35659 1056 bitmap_obstack_release (NULL);
c1dcd13c 1057 first_analyzed = cgraph_nodes;
ae01b312 1058 ggc_collect ();
aeeb194b 1059}
1060
8f69fd82 1061
aeeb194b 1062/* Analyze the whole compilation unit once it is parsed completely. */
1063
1064void
1065cgraph_finalize_compilation_unit (void)
1066{
9929334e 1067 timevar_push (TV_CGRAPH);
1068
bb903e9c 1069 /* If we're here there's no current function anymore. Some frontends
1070 are lazy in clearing these. */
1071 current_function_decl = NULL;
1072 set_cfun (NULL);
1073
bfec3452 1074 /* Do not skip analyzing the functions if there were errors; we would
 1075 miss diagnostics for the following functions otherwise. */
aeeb194b 1076
8f69fd82 1077 /* Emit size functions we didn't inline. */
4189e677 1078 finalize_size_functions ();
8f69fd82 1079
9929334e 1080 /* Mark alias targets necessary and emit diagnostics. */
1081 finish_aliases_1 ();
1082
aeeb194b 1083 if (!quiet_flag)
1084 {
1085 fprintf (stderr, "\nAnalyzing compilation unit\n");
1086 fflush (stderr);
1087 }
1088
9929334e 1089 /* Gimplify and lower all functions, compute reachability and
1090 remove unreachable nodes. */
1091 cgraph_analyze_functions ();
1092
8f69fd82 1093 /* Mark alias targets necessary and emit diagnostics. */
1094 finish_aliases_1 ();
1095
9929334e 1096 /* Gimplify and lower thunks. */
aeeb194b 1097 cgraph_analyze_functions ();
bfec3452 1098
9929334e 1099 /* Finally drive the pass manager. */
bfec3452 1100 cgraph_optimize ();
9929334e 1101
1102 timevar_pop (TV_CGRAPH);
ae01b312 1103}
9ed5b1f5 1104
1105
ae01b312 1106/* Figure out what functions we want to assemble. */
1107
1108static void
d9d9733a 1109cgraph_mark_functions_to_output (void)
ae01b312 1110{
1111 struct cgraph_node *node;
61c2c7b1 1112#ifdef ENABLE_CHECKING
1113 bool check_same_comdat_groups = false;
1114
1115 for (node = cgraph_nodes; node; node = node->next)
1116 gcc_assert (!node->process);
1117#endif
ae01b312 1118
ae01b312 1119 for (node = cgraph_nodes; node; node = node->next)
1120 {
1121 tree decl = node->decl;
d7c6d889 1122 struct cgraph_edge *e;
a0c938f0 1123
61c2c7b1 1124 gcc_assert (!node->process || node->same_comdat_group);
1125 if (node->process)
1126 continue;
d7c6d889 1127
1128 for (e = node->callers; e; e = e->next_caller)
611e5405 1129 if (e->inline_failed)
d7c6d889 1130 break;
ae01b312 1131
e6d2b2d8 1132 /* We need to output all local functions that are used and not
1133 always inlined, as well as those that are reachable from
1134 outside the current compilation unit. */
1a1a827a 1135 if (node->analyzed
b0cdf642 1136 && !node->global.inlined_to
1e3aebec 1137 && (!cgraph_only_called_directly_p (node)
d7c6d889 1138 || (e && node->reachable))
4ee9c684 1139 && !TREE_ASM_WRITTEN (decl)
ae01b312 1140 && !DECL_EXTERNAL (decl))
61c2c7b1 1141 {
1142 node->process = 1;
1143 if (node->same_comdat_group)
1144 {
1145 struct cgraph_node *next;
1146 for (next = node->same_comdat_group;
1147 next != node;
1148 next = next->same_comdat_group)
1149 next->process = 1;
1150 }
1151 }
1152 else if (node->same_comdat_group)
1153 {
1154#ifdef ENABLE_CHECKING
1155 check_same_comdat_groups = true;
1156#endif
1157 }
cc636d56 1158 else
9cee7c3f 1159 {
1160 /* We should've reclaimed all functions that are not needed. */
1161#ifdef ENABLE_CHECKING
75a70cf9 1162 if (!node->global.inlined_to
1a1a827a 1163 && gimple_has_body_p (decl)
08843223 1164 /* FIXME: in an ltrans unit, when the offline copy is outside a partition but inline
 1165 copies are inside the partition, we can end up not removing the body since we no
 1166 longer have an analyzed node pointing to it. */
1167 && !node->in_other_partition
9cee7c3f 1168 && !DECL_EXTERNAL (decl))
1169 {
1170 dump_cgraph_node (stderr, node);
1171 internal_error ("failed to reclaim unneeded function");
1172 }
1173#endif
75a70cf9 1174 gcc_assert (node->global.inlined_to
1a1a827a 1175 || !gimple_has_body_p (decl)
08843223 1176 || node->in_other_partition
9cee7c3f 1177 || DECL_EXTERNAL (decl));
1178
1179 }
a0c938f0 1180
961e3b13 1181 }
61c2c7b1 1182#ifdef ENABLE_CHECKING
1183 if (check_same_comdat_groups)
1184 for (node = cgraph_nodes; node; node = node->next)
1185 if (node->same_comdat_group && !node->process)
1186 {
1187 tree decl = node->decl;
1188 if (!node->global.inlined_to
1189 && gimple_has_body_p (decl)
08843223 1190 /* FIXME: in an ltrans unit, when the offline copy is outside a partition but inline
 1191 copies are inside the partition, we can end up not removing the body since we no
 1192 longer have an analyzed node pointing to it. */
1193 && !node->in_other_partition
61c2c7b1 1194 && !DECL_EXTERNAL (decl))
1195 {
1196 dump_cgraph_node (stderr, node);
1197 internal_error ("failed to reclaim unneeded function");
1198 }
1199 }
1200#endif
961e3b13 1201}
1202
28454517 1203/* DECL is a FUNCTION_DECL. Initialize data structures so that DECL is a
 1204 function in lowered GIMPLE form.
 1205
 1206 Set current_function_decl and cfun to the newly constructed empty function body.
 1207 Return the basic block in the function body. */
1208
1209static basic_block
1210init_lowered_empty_function (tree decl)
1211{
1212 basic_block bb;
1213
1214 current_function_decl = decl;
1215 allocate_struct_function (decl, false);
1216 gimple_register_cfg_hooks ();
1217 init_empty_tree_cfg ();
1218 init_tree_ssa (cfun);
1219 init_ssa_operands ();
1220 cfun->gimple_df->in_ssa_p = true;
1221 DECL_INITIAL (decl) = make_node (BLOCK);
1222
1223 DECL_SAVED_TREE (decl) = error_mark_node;
1224 cfun->curr_properties |=
1225 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1226 PROP_ssa);
1227
1228 /* Create BB for body of the function and connect it properly. */
1229 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1230 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1231 make_edge (bb, EXIT_BLOCK_PTR, 0);
1232
1233 return bb;
1234}
1235
1236/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1237 offset indicated by VIRTUAL_OFFSET, if that is
 1238 non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
 1239 zero for a result-adjusting thunk. */
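
/* Editor's illustration (not part of the original sources): for a
   this-adjusting thunk the adjustment built below corresponds roughly to
   this C, where ptr, fixed_offset and virtual_offset name the parameters:

     char *p = (char *) ptr + fixed_offset;        // constant adjustment
     if (virtual_offset)                           // vcall offset lookup
       {
         void *vtable = *(void **) p;              // vptr is at offset 0
         p += *(ptrdiff_t *) ((char *) vtable + virtual_offset);
       }
     return p;                                     // the adjusted pointer
*/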
1240
1241static tree
1242thunk_adjust (gimple_stmt_iterator * bsi,
1243 tree ptr, bool this_adjusting,
1244 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1245{
1246 gimple stmt;
1247 tree ret;
1248
55d6cb23 1249 if (this_adjusting
1250 && fixed_offset != 0)
28454517 1251 {
1252 stmt = gimple_build_assign (ptr,
1253 fold_build2_loc (input_location,
1254 POINTER_PLUS_EXPR,
1255 TREE_TYPE (ptr), ptr,
1256 size_int (fixed_offset)));
1257 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1258 }
1259
1260 /* If there's a virtual offset, look up that value in the vtable and
1261 adjust the pointer again. */
1262 if (virtual_offset)
1263 {
1264 tree vtabletmp;
1265 tree vtabletmp2;
1266 tree vtabletmp3;
1267 tree offsettmp;
1268
1269 if (!vtable_entry_type)
1270 {
1271 tree vfunc_type = make_node (FUNCTION_TYPE);
1272 TREE_TYPE (vfunc_type) = integer_type_node;
1273 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1274 layout_type (vfunc_type);
1275
1276 vtable_entry_type = build_pointer_type (vfunc_type);
1277 }
1278
1279 vtabletmp =
1280 create_tmp_var (build_pointer_type
1281 (build_pointer_type (vtable_entry_type)), "vptr");
1282
1283 /* The vptr is always at offset zero in the object. */
1284 stmt = gimple_build_assign (vtabletmp,
1285 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1286 ptr));
1287 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1288 mark_symbols_for_renaming (stmt);
1289 find_referenced_vars_in (stmt);
1290
1291 /* Form the vtable address. */
1292 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1293 "vtableaddr");
1294 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1295 build_simple_mem_ref (vtabletmp));
28454517 1296 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1297 mark_symbols_for_renaming (stmt);
1298 find_referenced_vars_in (stmt);
1299
1300 /* Find the entry with the vcall offset. */
1301 stmt = gimple_build_assign (vtabletmp2,
1302 fold_build2_loc (input_location,
1303 POINTER_PLUS_EXPR,
1304 TREE_TYPE (vtabletmp2),
1305 vtabletmp2,
1306 fold_convert (sizetype,
1307 virtual_offset)));
1308 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1309
1310 /* Get the offset itself. */
1311 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1312 "vcalloffset");
1313 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1314 build_simple_mem_ref (vtabletmp2));
28454517 1315 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1316 mark_symbols_for_renaming (stmt);
1317 find_referenced_vars_in (stmt);
1318
1319 /* Cast to sizetype. */
1320 offsettmp = create_tmp_var (sizetype, "offset");
1321 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1322 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1323 mark_symbols_for_renaming (stmt);
1324 find_referenced_vars_in (stmt);
1325
1326 /* Adjust the `this' pointer. */
1327 ptr = fold_build2_loc (input_location,
1328 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1329 offsettmp);
1330 }
1331
55d6cb23 1332 if (!this_adjusting
1333 && fixed_offset != 0)
28454517 1334 /* Adjust the pointer by the constant. */
1335 {
1336 tree ptrtmp;
1337
1338 if (TREE_CODE (ptr) == VAR_DECL)
1339 ptrtmp = ptr;
1340 else
1341 {
1342 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1343 stmt = gimple_build_assign (ptrtmp, ptr);
1344 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1345 mark_symbols_for_renaming (stmt);
1346 find_referenced_vars_in (stmt);
1347 }
1348 ptr = fold_build2_loc (input_location,
1349 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1350 size_int (fixed_offset));
1351 }
1352
1353 /* Emit the statement and gimplify the adjustment expression. */
1354 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1355 stmt = gimple_build_assign (ret, ptr);
1356 mark_symbols_for_renaming (stmt);
1357 find_referenced_vars_in (stmt);
1358 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1359
1360 return ret;
1361}
1362
1363/* Produce assembler for thunk NODE. */
1364
1365static void
1366assemble_thunk (struct cgraph_node *node)
1367{
1368 bool this_adjusting = node->thunk.this_adjusting;
1369 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1370 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1371 tree virtual_offset = NULL;
1372 tree alias = node->thunk.alias;
1373 tree thunk_fndecl = node->decl;
1374 tree a = DECL_ARGUMENTS (thunk_fndecl);
1375
1376 current_function_decl = thunk_fndecl;
1377
aed6e608 1378 /* Ensure thunks are emitted in their correct sections. */
1379 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1380
28454517 1381 if (this_adjusting
1382 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1383 virtual_value, alias))
1384 {
1385 const char *fnname;
1386 tree fn_block;
1387
1388 DECL_RESULT (thunk_fndecl)
1389 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1390 RESULT_DECL, 0, integer_type_node);
22ea3b47 1391 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1392
1393 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1394 create one. */
1395 fn_block = make_node (BLOCK);
1396 BLOCK_VARS (fn_block) = a;
1397 DECL_INITIAL (thunk_fndecl) = fn_block;
1398 init_function_start (thunk_fndecl);
1399 cfun->is_thunk = 1;
1400 assemble_start_function (thunk_fndecl, fnname);
1401
1402 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1403 fixed_offset, virtual_value, alias);
1404
1405 assemble_end_function (thunk_fndecl, fnname);
1406 init_insn_lengths ();
1407 free_after_compilation (cfun);
1408 set_cfun (NULL);
1409 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1410 }
1411 else
1412 {
1413 tree restype;
1414 basic_block bb, then_bb, else_bb, return_bb;
1415 gimple_stmt_iterator bsi;
1416 int nargs = 0;
1417 tree arg;
1418 int i;
1419 tree resdecl;
1420 tree restmp = NULL;
1421 VEC(tree, heap) *vargs;
1422
1423 gimple call;
1424 gimple ret;
1425
1426 DECL_IGNORED_P (thunk_fndecl) = 1;
1427 bitmap_obstack_initialize (NULL);
1428
1429 if (node->thunk.virtual_offset_p)
1430 virtual_offset = size_int (virtual_value);
1431
1432 /* Build the return declaration for the function. */
1433 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1434 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1435 {
1436 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1437 DECL_ARTIFICIAL (resdecl) = 1;
1438 DECL_IGNORED_P (resdecl) = 1;
1439 DECL_RESULT (thunk_fndecl) = resdecl;
1440 }
1441 else
1442 resdecl = DECL_RESULT (thunk_fndecl);
1443
1444 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1445
1446 bsi = gsi_start_bb (bb);
1447
1448 /* Build call to the function being thunked. */
1449 if (!VOID_TYPE_P (restype))
1450 {
1451 if (!is_gimple_reg_type (restype))
1452 {
1453 restmp = resdecl;
2ab2ce89 1454 add_local_decl (cfun, restmp);
28454517 1455 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1456 }
1457 else
1458 restmp = create_tmp_var_raw (restype, "retval");
1459 }
1460
1767a056 1461 for (arg = a; arg; arg = DECL_CHAIN (arg))
28454517 1462 nargs++;
1463 vargs = VEC_alloc (tree, heap, nargs);
1464 if (this_adjusting)
1465 VEC_quick_push (tree, vargs,
1466 thunk_adjust (&bsi,
1467 a, 1, fixed_offset,
1468 virtual_offset));
1469 else
1470 VEC_quick_push (tree, vargs, a);
1767a056 1471 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
28454517 1472 VEC_quick_push (tree, vargs, arg);
1473 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1474 VEC_free (tree, heap, vargs);
1475 gimple_call_set_cannot_inline (call, true);
1476 gimple_call_set_from_thunk (call, true);
1477 if (restmp)
1478 gimple_call_set_lhs (call, restmp);
1479 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1480 mark_symbols_for_renaming (call);
1481 find_referenced_vars_in (call);
1482 update_stmt (call);
1483
1484 if (restmp && !this_adjusting)
1485 {
57ab8ec3 1486 tree true_label = NULL_TREE;
28454517 1487
1488 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1489 {
1490 gimple stmt;
1491 /* If the return type is a pointer, we need to
1492 protect against NULL. We know there will be an
1493 adjustment, because that's why we're emitting a
1494 thunk. */
1495 then_bb = create_basic_block (NULL, (void *) 0, bb);
1496 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1497 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1498 remove_edge (single_succ_edge (bb));
1499 true_label = gimple_block_label (then_bb);
28454517 1500 stmt = gimple_build_cond (NE_EXPR, restmp,
385f3f36 1501 build_zero_cst (TREE_TYPE (restmp)),
28454517 1502 NULL_TREE, NULL_TREE);
1503 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1504 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1505 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1506 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1507 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1508 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1509 bsi = gsi_last_bb (then_bb);
1510 }
1511
1512 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1513 fixed_offset, virtual_offset);
1514 if (true_label)
1515 {
1516 gimple stmt;
1517 bsi = gsi_last_bb (else_bb);
385f3f36 1518 stmt = gimple_build_assign (restmp,
1519 build_zero_cst (TREE_TYPE (restmp)));
28454517 1520 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1521 bsi = gsi_last_bb (return_bb);
1522 }
1523 }
1524 else
1525 gimple_call_set_tail (call, true);
1526
1527 /* Build return value. */
1528 ret = gimple_build_return (restmp);
1529 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1530
1531 delete_unreachable_blocks ();
1532 update_ssa (TODO_update_ssa);
1533
1534 cgraph_remove_same_body_alias (node);
1535 /* Since we want to emit the thunk, we explicitly mark its name as
1536 referenced. */
28454517 1537 cgraph_add_new_function (thunk_fndecl, true);
1538 bitmap_obstack_release (NULL);
1539 }
1540 current_function_decl = NULL;
1541}
1542
ae01b312 1543/* Expand function specified by NODE. */
e6d2b2d8 1544
ae01b312 1545static void
d9d9733a 1546cgraph_expand_function (struct cgraph_node *node)
ae01b312 1547{
1548 tree decl = node->decl;
1549
b0cdf642 1550 /* We ought to not compile any inline clones. */
cc636d56 1551 gcc_assert (!node->global.inlined_to);
b0cdf642 1552
6329636b 1553 announce_function (decl);
09fc9532 1554 node->process = 0;
ed772161 1555 if (node->same_body)
1556 {
28454517 1557 struct cgraph_node *alias, *next;
ed772161 1558 bool saved_alias = node->alias;
28454517 1559 for (alias = node->same_body;
1560 alias && alias->next; alias = alias->next)
1561 ;
1562 /* Walk aliases in the order they were created; it is possible that
0a10fd82 1563 thunks refer to the aliases made earlier. */
28454517 1564 for (; alias; alias = next)
1565 {
1566 next = alias->previous;
1567 if (!alias->thunk.thunk_p)
1568 assemble_alias (alias->decl,
1569 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1570 else
1571 assemble_thunk (alias);
1572 }
ed772161 1573 node->alias = saved_alias;
f7777314 1574 cgraph_process_new_functions ();
ed772161 1575 }
f7777314 1576
1577 gcc_assert (node->lowered);
1578
1579 /* Generate RTL for the body of DECL. */
1580 tree_rest_of_compilation (decl);
1581
1582 /* Make sure that BE didn't give up on compiling. */
1583 gcc_assert (TREE_ASM_WRITTEN (decl));
1584 current_function_decl = NULL;
1a1a827a 1585 gcc_assert (!cgraph_preserve_function_body_p (decl));
1586 cgraph_release_function_body (node);
1587 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1588 points to the dead function body. */
1589 cgraph_node_remove_callees (node);
e1be32b8 1590
1591 cgraph_function_flags_ready = true;
ae01b312 1592}
1593
b0cdf642 1594/* Return true when the call on edge E will be inlined; store E's inline failure status into *REASON. */
d7c6d889 1595
1596bool
326a9581 1597cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1598{
b0cdf642 1599 *reason = e->inline_failed;
1600 return !e->inline_failed;
d7c6d889 1601}
b0cdf642 1602
acc70efa 1603
acc70efa 1604
d9d9733a 1605/* Expand all functions that must be output.
1606
d7c6d889 1607 Attempt to topologically sort the nodes so that a function is output when
 1608 all called functions are already assembled, to allow data to be
91c82c20 1609 propagated across the callgraph. Use a stack to get a smaller distance
3927afe0 1610 between a function and its callees (later we may choose to use a more
d7c6d889 1611 sophisticated algorithm for function reordering; we will likely want
1612 to use subsections to make the output functions appear in top-down
1613 order). */
1614
1615static void
a6868229 1616cgraph_expand_all_functions (void)
d7c6d889 1617{
1618 struct cgraph_node *node;
4c36ffe6 1619 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1620 int order_pos, new_order_pos = 0;
d7c6d889 1621 int i;
1622
d7c6d889 1623 order_pos = cgraph_postorder (order);
cc636d56 1624 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1625
7bd28bba 1626 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1627 optimization. So we must be sure to not reference them. */
1628 for (i = 0; i < order_pos; i++)
09fc9532 1629 if (order[i]->process)
b0cdf642 1630 order[new_order_pos++] = order[i];
1631
1632 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1633 {
1634 node = order[i];
09fc9532 1635 if (node->process)
d7c6d889 1636 {
cc636d56 1637 gcc_assert (node->reachable);
09fc9532 1638 node->process = 0;
d7c6d889 1639 cgraph_expand_function (node);
1640 }
1641 }
523c1122 1642 cgraph_process_new_functions ();
773c5ba7 1643
d7c6d889 1644 free (order);
773c5ba7 1645
d7c6d889 1646}
1647
56af936e 1648/* This is used to sort the node types by the cgraph order number. */
1649
0b09525f 1650enum cgraph_order_sort_kind
1651{
1652 ORDER_UNDEFINED = 0,
1653 ORDER_FUNCTION,
1654 ORDER_VAR,
1655 ORDER_ASM
1656};
1657
56af936e 1658struct cgraph_order_sort
1659{
0b09525f 1660 enum cgraph_order_sort_kind kind;
56af936e 1661 union
1662 {
1663 struct cgraph_node *f;
1d416bd7 1664 struct varpool_node *v;
56af936e 1665 struct cgraph_asm_node *a;
1666 } u;
1667};
1668
1669/* Output all functions, variables, and asm statements in the order
1670 according to their order fields, which is the order in which they
1671 appeared in the file. This implements -fno-toplevel-reorder. In
1672 this mode we may output functions and variables which don't really
1673 need to be output. */
1674
1675static void
1676cgraph_output_in_order (void)
1677{
1678 int max;
56af936e 1679 struct cgraph_order_sort *nodes;
1680 int i;
1681 struct cgraph_node *pf;
1d416bd7 1682 struct varpool_node *pv;
56af936e 1683 struct cgraph_asm_node *pa;
1684
1685 max = cgraph_order;
3e1cde87 1686 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1687
1d416bd7 1688 varpool_analyze_pending_decls ();
56af936e 1689
1690 for (pf = cgraph_nodes; pf; pf = pf->next)
1691 {
09fc9532 1692 if (pf->process)
56af936e 1693 {
1694 i = pf->order;
1695 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1696 nodes[i].kind = ORDER_FUNCTION;
1697 nodes[i].u.f = pf;
1698 }
1699 }
1700
1d416bd7 1701 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1702 {
1703 i = pv->order;
1704 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1705 nodes[i].kind = ORDER_VAR;
1706 nodes[i].u.v = pv;
1707 }
1708
1709 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1710 {
1711 i = pa->order;
1712 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1713 nodes[i].kind = ORDER_ASM;
1714 nodes[i].u.a = pa;
1715 }
56af936e 1716
304e5318 1717  /* In -fno-toplevel-reorder mode we output all statics; mark them as needed.  */
1718 for (i = 0; i < max; ++i)
1719 {
1720 if (nodes[i].kind == ORDER_VAR)
1721 {
1722 varpool_mark_needed_node (nodes[i].u.v);
1723 }
1724 }
1725 varpool_empty_needed_queue ();
1726
91da0f1c 1727 for (i = 0; i < max; ++i)
1728 if (nodes[i].kind == ORDER_VAR)
1729 varpool_finalize_named_section_flags (nodes[i].u.v);
1730
56af936e 1731 for (i = 0; i < max; ++i)
1732 {
1733 switch (nodes[i].kind)
1734 {
1735 case ORDER_FUNCTION:
09fc9532 1736 nodes[i].u.f->process = 0;
56af936e 1737 cgraph_expand_function (nodes[i].u.f);
1738 break;
1739
1740 case ORDER_VAR:
1d416bd7 1741 varpool_assemble_decl (nodes[i].u.v);
56af936e 1742 break;
1743
1744 case ORDER_ASM:
1745 assemble_asm (nodes[i].u.a->asm_str);
1746 break;
1747
1748 case ORDER_UNDEFINED:
1749 break;
1750
1751 default:
1752 gcc_unreachable ();
1753 }
1754 }
4b4ea2db 1755
1756 cgraph_asm_nodes = NULL;
3e1cde87 1757 free (nodes);
56af936e 1758}
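
/* Illustrative example (a sketch over hypothetical input, not from the
   original sources): for a translation unit containing, in this order,

     int counter;
     asm ("# marker");
     static void helper (void) { counter++; }

   the three entities get increasing order numbers, so under
   -fno-toplevel-reorder the switch above emits the ORDER_VAR entry for
   counter, then the ORDER_ASM entry, then the ORDER_FUNCTION entry for
   helper, preserving the original source order.  */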
1759
b0cdf642 1760/* Return true when function body of DECL still needs to be kept around
1761 for later re-use. */
1762bool
1763cgraph_preserve_function_body_p (tree decl)
1764{
1765 struct cgraph_node *node;
8d8c4c8d 1766
1767 gcc_assert (cgraph_global_info_ready);
b0cdf642 1768 /* Look if there is any clone around. */
ccf4ab6b 1769 node = cgraph_node (decl);
1770 if (node->clones)
1771 return true;
b0cdf642 1772 return false;
1773}
1774
77fce4cd 1775static void
1776ipa_passes (void)
1777{
87d4aa85 1778 set_cfun (NULL);
4b14adf9 1779 current_function_decl = NULL;
75a70cf9 1780 gimple_register_cfg_hooks ();
77fce4cd 1781 bitmap_obstack_initialize (NULL);
59dd4830 1782
c9036234 1783 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1784
59dd4830 1785 if (!in_lto_p)
7b2e8956 1786 {
1787 execute_ipa_pass_list (all_small_ipa_passes);
1788 if (seen_error ())
1789 return;
1790 }
9ed5b1f5 1791
7bfefa9d 1792 /* If pass_all_early_optimizations was not scheduled, the state of
1793 the cgraph will not be properly updated. Update it now. */
1794 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1795 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 1796
7bfefa9d 1797 if (!in_lto_p)
1798 {
1799 /* Generate coverage variables and constructors. */
1800 coverage_finish ();
1801
1802 /* Process new functions added. */
1803 set_cfun (NULL);
1804 current_function_decl = NULL;
1805 cgraph_process_new_functions ();
7bfefa9d 1806
c9036234 1807 execute_ipa_summary_passes
1808 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 1809 }
23433d72 1810
1811 /* Some targets need to handle LTO assembler output specially. */
1812 if (flag_generate_lto)
1813 targetm.asm_out.lto_start ();
1814
7bfefa9d 1815 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1816
1817 if (!in_lto_p)
1818 ipa_write_summaries ();
1819
23433d72 1820 if (flag_generate_lto)
1821 targetm.asm_out.lto_end ();
1822
8867b500 1823 if (!flag_ltrans)
1824 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 1825 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 1826
77fce4cd 1827 bitmap_obstack_release (NULL);
1828}
1829
34e5cced 1830
ae01b312 1831/* Perform simple optimizations based on callgraph. */
1832
7bfefa9d 1833void
d9d9733a 1834cgraph_optimize (void)
ae01b312 1835{
852f689e 1836 if (seen_error ())
cb2b5570 1837 return;
1838
b0cdf642 1839#ifdef ENABLE_CHECKING
1840 verify_cgraph ();
1841#endif
a861fe52 1842
c1dcd13c 1843 /* Frontend may output common variables after the unit has been finalized.
1844 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 1845 varpool_analyze_pending_decls ();
e9f08e82 1846
f79b6507 1847 timevar_push (TV_CGRAPHOPT);
51949610 1848 if (pre_ipa_mem_report)
1849 {
1850 fprintf (stderr, "Memory consumption before IPA\n");
1851 dump_memory_report (false);
1852 }
d7c6d889 1853 if (!quiet_flag)
cd6bca02 1854 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1855 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1856
be4d0974 1857  /* Don't run the IPA passes if there were any error or sorry messages.  */
852f689e 1858 if (!seen_error ())
be4d0974 1859 ipa_passes ();
1860
34e5cced 1861 /* Do nothing else if any IPA pass found errors. */
852f689e 1862 if (seen_error ())
021c1c18 1863 {
1864 timevar_pop (TV_CGRAPHOPT);
1865 return;
1866 }
34e5cced 1867
e1be32b8 1868  /* This pass removes bodies of extern inline functions we never inlined.
1869 Do this later so other IPA passes see what is really going on. */
1870 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1871 cgraph_global_info_ready = true;
f79b6507 1872 if (cgraph_dump_file)
1873 {
e4200070 1874 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1875 dump_cgraph (cgraph_dump_file);
c1dcd13c 1876 dump_varpool (cgraph_dump_file);
f79b6507 1877 }
51949610 1878 if (post_ipa_mem_report)
1879 {
defa2fa6 1880 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1881 dump_memory_report (false);
1882 }
f79b6507 1883 timevar_pop (TV_CGRAPHOPT);
ae01b312 1884
d7c6d889 1885 /* Output everything. */
47306a5d 1886 (*debug_hooks->assembly_start) ();
e4200070 1887 if (!quiet_flag)
1888 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1889#ifdef ENABLE_CHECKING
1890 verify_cgraph ();
1891#endif
56af936e 1892
ccf4ab6b 1893 cgraph_materialize_all_clones ();
acc70efa 1894 cgraph_mark_functions_to_output ();
c1dcd13c 1895
523c1122 1896 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1897 if (!flag_toplevel_reorder)
1898 cgraph_output_in_order ();
1899 else
1900 {
1901 cgraph_output_pending_asms ();
1902
1903 cgraph_expand_all_functions ();
1d416bd7 1904 varpool_remove_unreferenced_decls ();
56af936e 1905
1d416bd7 1906 varpool_assemble_pending_decls ();
56af936e 1907 }
523c1122 1908 cgraph_process_new_functions ();
1909 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1910
f79b6507 1911 if (cgraph_dump_file)
1912 {
e4200070 1913 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1914 dump_cgraph (cgraph_dump_file);
7410370b 1915 dump_varpool (cgraph_dump_file);
f79b6507 1916 }
b0cdf642 1917#ifdef ENABLE_CHECKING
1918 verify_cgraph ();
4ee9c684 1919 /* Double check that all inline clones are gone and that all
1920 function bodies have been released from memory. */
852f689e 1921 if (!seen_error ())
4ee9c684 1922 {
1923 struct cgraph_node *node;
1924 bool error_found = false;
1925
1926 for (node = cgraph_nodes; node; node = node->next)
1927 if (node->analyzed
1928 && (node->global.inlined_to
1a1a827a 1929 || gimple_has_body_p (node->decl)))
4ee9c684 1930 {
1931 error_found = true;
1932 dump_cgraph_node (stderr, node);
a0c938f0 1933 }
4ee9c684 1934 if (error_found)
c04e3894 1935 internal_error ("nodes with unreleased memory found");
4ee9c684 1936 }
b0cdf642 1937#endif
ae01b312 1938}
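
/* Informal summary of the driver above (derived from the code itself, not a
   separate specification): after the IPA passes run, unreachable nodes are
   removed, clones are materialized, functions to output are marked, and the
   unit is then emitted either in source order (-fno-toplevel-reorder) or by
   expanding all functions and assembling the remaining variables.  */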
34e5cced 1939
121f3051 1940void
1941init_cgraph (void)
1942{
01ec0a6c 1943 if (!cgraph_dump_file)
1944 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 1945}
b5d36404 1946
a0c938f0 1947/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1949   fixed by cgraph_function_versioning (); now the call_expr in their
1949 respective tree code should be updated to call the NEW_VERSION. */
1950
1951static void
1952update_call_expr (struct cgraph_node *new_version)
1953{
1954 struct cgraph_edge *e;
1955
1956 gcc_assert (new_version);
75a70cf9 1957
1958 /* Update the call expr on the edges to call the new version. */
b5d36404 1959 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1960 {
1961 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1962 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1963 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1964 }
b5d36404 1965}
1966
1967
1968/* Create a new cgraph node which is the new version of
 1969   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
 1970   edges which should be redirected to point to
 1971   NEW_VERSION.  All the callee edges of OLD_VERSION
1972 are cloned to the new version node. Return the new
b06ab5fa 1973 version node.
1974
 1975   If non-NULL, BBS_TO_COPY determines which basic blocks
 1976   are copied, to prevent duplication of calls that are dead
 1977   in the clone.  */
b5d36404 1978
1979static struct cgraph_node *
1980cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1981 tree new_decl,
b06ab5fa 1982 VEC(cgraph_edge_p,heap) *redirect_callers,
1983 bitmap bbs_to_copy)
1984 {
b5d36404 1985 struct cgraph_node *new_version;
32936803 1986 struct cgraph_edge *e;
b5d36404 1987 unsigned i;
1988
1989 gcc_assert (old_version);
a0c938f0 1990
b5d36404 1991 new_version = cgraph_node (new_decl);
1992
1993 new_version->analyzed = true;
1994 new_version->local = old_version->local;
a70a5e2c 1995 new_version->local.externally_visible = false;
1996 new_version->local.local = true;
1997 new_version->local.vtable_method = false;
b5d36404 1998 new_version->global = old_version->global;
a93f1c3b 1999 new_version->rtl = old_version->rtl;
b5d36404 2000 new_version->reachable = true;
2001 new_version->count = old_version->count;
2002
a70a5e2c 2003 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2004 if (!bbs_to_copy
2005 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2006 cgraph_clone_edge (e, new_version, e->call_stmt,
2007 e->lto_stmt_uid, REG_BR_PROB_BASE,
2008 CGRAPH_FREQ_BASE,
2009 e->loop_nest, true);
a70a5e2c 2010 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2011 if (!bbs_to_copy
2012 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2013 cgraph_clone_edge (e, new_version, e->call_stmt,
2014 e->lto_stmt_uid, REG_BR_PROB_BASE,
2015 CGRAPH_FREQ_BASE,
2016 e->loop_nest, true);
48148244 2017 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
4460a647 2018 {
2019 /* Redirect calls to the old version node to point to its new
2020 version. */
2021 cgraph_redirect_edge_callee (e, new_version);
2022 }
b5d36404 2023
2024 return new_version;
2025 }
2026
2027 /* Perform function versioning.
a0c938f0 2028 Function versioning includes copying of the tree and
b5d36404 2029 a callgraph update (creating a new cgraph node and updating
2030 its callees and callers).
2031
2032 REDIRECT_CALLERS varray includes the edges to be redirected
2033 to the new version.
2034
2035 TREE_MAP is a mapping of tree nodes we want to replace with
2036 new ones (according to results of prior analysis).
2037 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2038 It returns the new version's cgraph node.
b06ab5fa 2039   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
 2040   from the new version.
 2041   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
 2042   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */
b5d36404 2043
2044struct cgraph_node *
2045cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2046 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2047 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2048 bitmap args_to_skip,
b06ab5fa 2049 bitmap bbs_to_copy,
2050 basic_block new_entry_block,
a70a5e2c 2051 const char *clone_name)
b5d36404 2052{
2053 tree old_decl = old_version_node->decl;
2054 struct cgraph_node *new_version_node = NULL;
2055 tree new_decl;
2056
2057 if (!tree_versionable_function_p (old_decl))
2058 return NULL;
2059
3c97c75d 2060 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2061
b5d36404 2062 /* Make a new FUNCTION_DECL tree node for the
2063 new version. */
5afe38fe 2064 if (!args_to_skip)
2065 new_decl = copy_node (old_decl);
2066 else
2067 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2068
df0b8dfb 2069 /* Generate a new name for the new version. */
2070 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2071 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2072 SET_DECL_RTL (new_decl, NULL);
2073
b5d36404 2074  /* Create the new version's call-graph node
 2075     and update the edges of the new node.  */
2076 new_version_node =
2077 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2078 redirect_callers, bbs_to_copy);
b5d36404 2079
2080 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2081 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2082 bbs_to_copy, new_entry_block);
b5d36404 2083
a0c938f0 2084 /* Update the new version's properties.
e03a95e7 2085     Make the new version visible only within this translation unit.  Make sure
 2086     it is not weak either.
a0c938f0 2087 ??? We cannot use COMDAT linkage because there is no
b5d36404 2088 ABI support for this. */
6137cc9f 2089 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2090 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2091 new_version_node->local.externally_visible = 0;
2092 new_version_node->local.local = 1;
2093 new_version_node->lowered = true;
f014e39d 2094
e03a95e7 2095 /* Update the call_expr on the edges to call the new version node. */
2096 update_call_expr (new_version_node);
48e1416a 2097
50828ed8 2098 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2099 return new_version_node;
2100}
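
/* Illustrative usage sketch (hypothetical caller and clone name, not taken
   from this file): create a plain copy of NODE, skipping no arguments,
   copying the whole body and redirecting no existing callers:

     struct cgraph_node *copy
       = cgraph_function_versioning (node,
                                     NULL,            (redirect_callers)
                                     NULL,            (tree_map)
                                     NULL,            (args_to_skip)
                                     NULL,            (bbs_to_copy)
                                     NULL,            (new_entry_block)
                                     "example_copy");

   Real IPA passes instead pass the redirect_callers vector and tree_map
   computed by their analysis.  */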
469679ab 2101
2102/* Produce separate function body for inline clones so the offline copy can be
2103 modified without affecting them. */
2104struct cgraph_node *
2105save_inline_function_body (struct cgraph_node *node)
2106{
ccf4ab6b 2107 struct cgraph_node *first_clone, *n;
469679ab 2108
2109 gcc_assert (node == cgraph_node (node->decl));
2110
2111 cgraph_lower_function (node);
2112
ccf4ab6b 2113 first_clone = node->clones;
469679ab 2114
2115 first_clone->decl = copy_node (node->decl);
469679ab 2116 cgraph_insert_node_to_hashtable (first_clone);
2117 gcc_assert (first_clone == cgraph_node (first_clone->decl));
ccf4ab6b 2118 if (first_clone->next_sibling_clone)
2119 {
2120 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2121 n->clone_of = first_clone;
2122 n->clone_of = first_clone;
2123 n->next_sibling_clone = first_clone->clones;
2124 if (first_clone->clones)
2125 first_clone->clones->prev_sibling_clone = n;
2126 first_clone->clones = first_clone->next_sibling_clone;
2127 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2128 first_clone->next_sibling_clone = NULL;
2129 gcc_assert (!first_clone->prev_sibling_clone);
2130 }
2131 first_clone->clone_of = NULL;
2132 node->clones = NULL;
2133
2134 if (first_clone->clones)
2135 for (n = first_clone->clones; n != first_clone;)
2136 {
2137 gcc_assert (n->decl == node->decl);
2138 n->decl = first_clone->decl;
2139 if (n->clones)
2140 n = n->clones;
2141 else if (n->next_sibling_clone)
2142 n = n->next_sibling_clone;
2143 else
2144 {
2145 while (n != first_clone && !n->next_sibling_clone)
2146 n = n->clone_of;
2147 if (n != first_clone)
2148 n = n->next_sibling_clone;
2149 }
2150 }
469679ab 2151
2152 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2153 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2154 NULL, NULL);
469679ab 2155
2156 DECL_EXTERNAL (first_clone->decl) = 0;
ecd88073 2157 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
469679ab 2158 TREE_PUBLIC (first_clone->decl) = 0;
2159 DECL_COMDAT (first_clone->decl) = 0;
7fe9b425 2160 VEC_free (ipa_opt_pass, heap,
6d1cc52c 2161 first_clone->ipa_transforms_to_apply);
2162 first_clone->ipa_transforms_to_apply = NULL;
469679ab 2163
469679ab 2164#ifdef ENABLE_CHECKING
2165 verify_cgraph_node (first_clone);
2166#endif
2167 return first_clone;
2168}
a861fe52 2169
ccf4ab6b 2170/* Given virtual clone, turn it into actual clone. */
2171static void
2172cgraph_materialize_clone (struct cgraph_node *node)
2173{
2174 bitmap_obstack_initialize (NULL);
e748b31d 2175 node->former_clone_of = node->clone_of->decl;
2176 if (node->clone_of->former_clone_of)
2177 node->former_clone_of = node->clone_of->former_clone_of;
ccf4ab6b 2178 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2179 tree_function_versioning (node->clone_of->decl, node->decl,
2180 node->clone.tree_map, true,
b06ab5fa 2181 node->clone.args_to_skip, NULL, NULL);
e20422ea 2182 if (cgraph_dump_file)
2183 {
2184 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2185 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2186 }
ccf4ab6b 2187
2188 /* Function is no longer clone. */
2189 if (node->next_sibling_clone)
2190 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2191 if (node->prev_sibling_clone)
2192 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2193 else
2194 node->clone_of->clones = node->next_sibling_clone;
2195 node->next_sibling_clone = NULL;
2196 node->prev_sibling_clone = NULL;
6d1cc52c 2197 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2198 {
2199 cgraph_release_function_body (node->clone_of);
2200 cgraph_node_remove_callees (node->clone_of);
2201 ipa_remove_all_references (&node->clone_of->ref_list);
2202 }
ccf4ab6b 2203 node->clone_of = NULL;
2204 bitmap_obstack_release (NULL);
2205}
2206
c596d830 2207/* If necessary, change the function declaration in the call statement
2208 associated with E so that it corresponds to the edge callee. */
2209
2210gimple
2211cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2212{
2213 tree decl = gimple_call_fndecl (e->call_stmt);
2214 gimple new_stmt;
3fd0ca33 2215 gimple_stmt_iterator gsi;
2216 bool gsi_computed = false;
1f449108 2217#ifdef ENABLE_CHECKING
2218 struct cgraph_node *node;
2219#endif
c596d830 2220
1caef38b 2221 if (e->indirect_unknown_callee
2222 || decl == e->callee->decl
c596d830 2223 /* Don't update call from same body alias to the real function. */
1caef38b 2224 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
c596d830 2225 return e->call_stmt;
2226
1f449108 2227#ifdef ENABLE_CHECKING
1caef38b 2228 if (decl)
2229 {
2230 node = cgraph_get_node (decl);
2231 gcc_assert (!node || !node->clone.combined_args_to_skip);
2232 }
1f449108 2233#endif
e748b31d 2234
c596d830 2235 if (cgraph_dump_file)
2236 {
2237 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2238 cgraph_node_name (e->caller), e->caller->uid,
2239 cgraph_node_name (e->callee), e->callee->uid);
2240 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2241 if (e->callee->clone.combined_args_to_skip)
91aba934 2242 {
2243 fprintf (cgraph_dump_file, " combined args to skip: ");
2244 dump_bitmap (cgraph_dump_file,
2245 e->callee->clone.combined_args_to_skip);
e748b31d 2246 }
c596d830 2247 }
2248
9bab6a70 2249 if (e->indirect_info &&
2250 e->indirect_info->thunk_delta != 0
3fd0ca33 2251 && (!e->callee->clone.combined_args_to_skip
2252 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2253 {
2254 if (cgraph_dump_file)
9bab6a70 2255 fprintf (cgraph_dump_file, " Thunk delta is "
2256 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
3fd0ca33 2257 gsi = gsi_for_stmt (e->call_stmt);
2258 gsi_computed = true;
9bab6a70 2259 gimple_adjust_this_by_delta (&gsi,
2260 build_int_cst (sizetype,
2261 e->indirect_info->thunk_delta));
2262 e->indirect_info->thunk_delta = 0;
3fd0ca33 2263 }
2264
c596d830 2265 if (e->callee->clone.combined_args_to_skip)
91aba934 2266 {
092cd838 2267 int lp_nr;
91aba934 2268
2269 new_stmt
2270 = gimple_call_copy_skip_args (e->call_stmt,
2271 e->callee->clone.combined_args_to_skip);
75c7f5a5 2272 gimple_call_set_fndecl (new_stmt, e->callee->decl);
91aba934 2273
2274 if (gimple_vdef (new_stmt)
2275 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2276 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2277
3fd0ca33 2278 if (!gsi_computed)
2279 gsi = gsi_for_stmt (e->call_stmt);
9126b675 2280 gsi_replace (&gsi, new_stmt, false);
092cd838 2281 /* We need to defer cleaning EH info on the new statement to
2282 fixup-cfg. We may not have dominator information at this point
2283 and thus would end up with unreachable blocks and have no way
2284 to communicate that we need to run CFG cleanup then. */
2285 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2286 if (lp_nr != 0)
2287 {
2288 remove_stmt_from_eh_lp (e->call_stmt);
2289 add_stmt_to_eh_lp (new_stmt, lp_nr);
2290 }
91aba934 2291 }
c596d830 2292 else
75c7f5a5 2293 {
2294 new_stmt = e->call_stmt;
2295 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2296 update_stmt (new_stmt);
2297 }
c596d830 2298
c596d830 2299 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2300
2301 if (cgraph_dump_file)
2302 {
2303 fprintf (cgraph_dump_file, " updated to:");
2304 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2305 }
2306 return new_stmt;
2307}
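
/* Illustrative example of the rewrite above (a sketch with hypothetical
   declarations, not taken from this file): if the edge callee is a clone of

     int foo (int a, int b);

   whose combined_args_to_skip has bit 1 set, then an original call

     tmp = foo (x, y);

   is replaced by a copy with the skipped argument dropped and the fndecl
   redirected to the clone, roughly

     tmp = foo.clone.0 (x);

   and any EH landing-pad information is moved over to the new statement.  */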
2308
ccf4ab6b 2309/* Once all functions from the compilation unit are in memory, produce all clones
c596d830 2310   and update all calls.  We might also do this on demand if we don't want to
 2311   bring all functions to memory prior to compilation, but the current WHOPR
 2312   implementation does that and it is a bit easier to keep everything right in
 2313   this order.  */
ccf4ab6b 2314void
2315cgraph_materialize_all_clones (void)
2316{
2317 struct cgraph_node *node;
2318 bool stabilized = false;
2319
2320 if (cgraph_dump_file)
2321 fprintf (cgraph_dump_file, "Materializing clones\n");
2322#ifdef ENABLE_CHECKING
2323 verify_cgraph ();
2324#endif
2325
 2326  /* We could also use topological order, but the number of iterations should be
 2327     bounded by the number of IPA passes, since a single IPA pass is probably not
 2328     going to create clones of clones it created itself.  */
2329 while (!stabilized)
2330 {
2331 stabilized = true;
2332 for (node = cgraph_nodes; node; node = node->next)
2333 {
2334 if (node->clone_of && node->decl != node->clone_of->decl
2335 && !gimple_has_body_p (node->decl))
2336 {
2337 if (gimple_has_body_p (node->clone_of->decl))
2338 {
2339 if (cgraph_dump_file)
e20422ea 2340 {
0a10fd82 2341 fprintf (cgraph_dump_file, "cloning %s to %s\n",
e20422ea 2342 cgraph_node_name (node->clone_of),
2343 cgraph_node_name (node));
2344 if (node->clone.tree_map)
2345 {
2346 unsigned int i;
2347 fprintf (cgraph_dump_file, " replace map: ");
2348 for (i = 0; i < VEC_length (ipa_replace_map_p,
2349 node->clone.tree_map);
2350 i++)
2351 {
2352 struct ipa_replace_map *replace_info;
2353 replace_info = VEC_index (ipa_replace_map_p,
2354 node->clone.tree_map,
2355 i);
2356 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2357 fprintf (cgraph_dump_file, " -> ");
2358 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2359 fprintf (cgraph_dump_file, "%s%s;",
2360 replace_info->replace_p ? "(replace)":"",
2361 replace_info->ref_p ? "(ref)":"");
2362 }
2363 fprintf (cgraph_dump_file, "\n");
2364 }
2365 if (node->clone.args_to_skip)
2366 {
2367 fprintf (cgraph_dump_file, " args_to_skip: ");
2368 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2369 }
 2370			  if (node->clone.combined_args_to_skip)
2371 {
2372 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2373 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2374 }
2375 }
ccf4ab6b 2376 cgraph_materialize_clone (node);
a510bd8d 2377 stabilized = false;
ccf4ab6b 2378 }
ccf4ab6b 2379 }
2380 }
2381 }
ee3f5fc0 2382 for (node = cgraph_nodes; node; node = node->next)
2383 if (!node->analyzed && node->callees)
2384 cgraph_node_remove_callees (node);
c596d830 2385 if (cgraph_dump_file)
2386 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2387#ifdef ENABLE_CHECKING
2388 verify_cgraph ();
2389#endif
ccf4ab6b 2390 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2391}
2392
a861fe52 2393#include "gt-cgraphunit.h"