1/* Driver of optimization process
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - cgraph_finalize_function
29
 30 This function is called once the front-end has parsed the whole body of a function
 31 and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
 38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
 43 Inserts a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
 47 This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
 78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
 80 propagation across the callgraph is done, e.g. to discover
 81 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
 86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
 93 passes by their ability to operate across the whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
 96 not having to represent the whole program in memory.
97
 98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
 101 Compile time and/or linktime analysis stage (WPA):
102
 103 At linktime units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
 110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
 115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
 121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
 125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
 138 At this stage functions that need to be output into
 139 assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
 141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
 145 are combined together in output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
 151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
 156 produces a copy of a function into a new one (a version)
 157 and applies simple transformations.
158*/
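/* A minimal illustrative sketch (not part of GCC) of how a front end
   is expected to drive the interface described above.  The frontend_*
   iterators are hypothetical stand-ins for however the front end walks
   its parsed declarations; only the cgraph/varpool calls are real.  */
#if 0
static void
frontend_emit_translation_unit (void)
{
  tree decl;

  /* Hand each parsed function to the callgraph once neither its body
     nor its declaration can change any more.  */
  for (decl = frontend_first_function (); decl;
       decl = frontend_next_function (decl))
    cgraph_finalize_function (decl, /*nested=*/false);

  /* Do the same for file scope variables.  */
  for (decl = frontend_first_variable (); decl;
       decl = frontend_next_variable (decl))
    varpool_finalize_decl (decl);

  /* Build the symbol table, analyze reachable symbols and let the
     middle end take over as described in the overview above.  */
  finalize_compilation_unit ();
}
#endif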
159
160#include "config.h"
161#include "system.h"
162#include "coretypes.h"
163#include "tm.h"
164#include "tree.h"
165#include "output.h"
166#include "rtl.h"
167#include "tree-flow.h"
168#include "tree-inline.h"
169#include "langhooks.h"
170#include "pointer-set.h"
171#include "toplev.h"
172#include "flags.h"
173#include "ggc.h"
174#include "debug.h"
175#include "target.h"
176#include "cgraph.h"
177#include "diagnostic.h"
178#include "params.h"
179#include "fibheap.h"
180#include "intl.h"
181#include "function.h"
182#include "ipa-prop.h"
183#include "gimple.h"
184#include "tree-iterator.h"
185#include "tree-pass.h"
186#include "tree-dump.h"
187#include "gimple-pretty-print.h"
188#include "output.h"
189#include "coverage.h"
190#include "plugin.h"
191#include "ipa-inline.h"
192#include "ipa-utils.h"
193#include "lto-streamer.h"
194#include "except.h"
195#include "cfgloop.h"
196#include "regset.h" /* FIXME: For reg_obstack. */
197
198/* Queue of cgraph nodes scheduled to be added into cgraph. This is a
199 secondary queue used during optimization to accommodate passes that
200 may generate new functions that need to be optimized and expanded. */
201cgraph_node_set cgraph_new_nodes;
202
203static void expand_all_functions (void);
204static void mark_functions_to_output (void);
205static void expand_function (struct cgraph_node *);
206static void analyze_function (struct cgraph_node *);
207static void handle_alias_pairs (void);
208
209FILE *cgraph_dump_file;
210
211/* Linked list of cgraph asm nodes. */
212struct asm_node *asm_nodes;
213
 215/* Last node in asm_nodes. */
215static GTY(()) struct asm_node *asm_last_node;
216
217/* Used for vtable lookup in thunk adjusting. */
218static GTY (()) tree vtable_entry_type;
219
 220/* Determine if symbol DECL is needed. That is, visible to something
 221 either outside this translation unit, or something magic in the system
 222 configury. */
223bool
224decide_is_symbol_needed (symtab_node node)
225{
226 tree decl = node->symbol.decl;
227
228 /* Double check that no one output the function into assembly file
229 early. */
230 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
231 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
232
233 if (!node->symbol.definition)
234 return false;
235
236 /* Devirtualization may access these. */
237 if (DECL_VIRTUAL_P (decl) && optimize)
238 return true;
239
240 if (DECL_EXTERNAL (decl))
241 return false;
242
243 /* If the user told us it is used, then it must be so. */
244 if (node->symbol.force_output)
245 return true;
246
247 /* ABI forced symbols are needed when they are external. */
248 if (node->symbol.forced_by_abi && TREE_PUBLIC (decl))
249 return true;
250
251 /* Keep constructors, destructors and virtual functions. */
252 if (TREE_CODE (decl) == FUNCTION_DECL
253 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
254 return true;
255
256 /* Externally visible variables must be output. The exception is
257 COMDAT variables that must be output only when they are needed. */
258 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
259 return true;
260
261 return false;
262}
263
264/* Head of the queue of nodes to be processed while building callgraph */
265
266static symtab_node first = (symtab_node)(void *)1;
267
268/* Add NODE to queue starting at FIRST.
269 The queue is linked via AUX pointers and terminated by pointer to 1. */
270
271static void
272enqueue_node (symtab_node node)
273{
274 if (node->symbol.aux)
275 return;
276 gcc_checking_assert (first);
277 node->symbol.aux = first;
278 first = node;
279}
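/* Illustrative sketch of how the queue built by enqueue_node is
   drained; the real loop in analyze_functions below additionally
   follows references and comdat groups.  */
#if 0
  while (first != (symtab_node)(void *)1)
    {
      symtab_node node = first;
      first = (symtab_node)first->symbol.aux;
      /* ... analyze NODE and enqueue every symbol it needs ... */
    }
#endif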
280
281/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
 282 functions into the callgraph so that they look like ordinary reachable
283 functions inserted into callgraph already at construction time. */
284
285bool
286cgraph_process_new_functions (void)
287{
288 bool output = false;
289 tree fndecl;
290 struct cgraph_node *node;
291 cgraph_node_set_iterator csi;
292
293 if (!cgraph_new_nodes)
294 return false;
295 handle_alias_pairs ();
 296 /* Note that this queue may grow as it is being processed, as the new
297 functions may generate new ones. */
298 for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
299 {
300 node = csi_node (csi);
301 fndecl = node->symbol.decl;
302 switch (cgraph_state)
303 {
304 case CGRAPH_STATE_CONSTRUCTION:
305 /* At construction time we just need to finalize function and move
306 it into reachable functions list. */
307
308 cgraph_finalize_function (fndecl, false);
309 output = true;
310 cgraph_call_function_insertion_hooks (node);
311 enqueue_node ((symtab_node) node);
312 break;
313
314 case CGRAPH_STATE_IPA:
315 case CGRAPH_STATE_IPA_SSA:
316 /* When IPA optimization already started, do all essential
 317 transformations that have already been performed on the whole
318 cgraph but not on this function. */
319
320 gimple_register_cfg_hooks ();
321 if (!node->symbol.analyzed)
322 analyze_function (node);
323 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
324 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
325 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
326 /* When not optimizing, be sure we run early local passes anyway
327 to expand OMP. */
328 || !optimize)
329 execute_pass_list (pass_early_local_passes.pass.sub);
330 else
331 compute_inline_parameters (node, true);
332 free_dominance_info (CDI_POST_DOMINATORS);
333 free_dominance_info (CDI_DOMINATORS);
334 pop_cfun ();
335 cgraph_call_function_insertion_hooks (node);
336 break;
337
338 case CGRAPH_STATE_EXPANSION:
339 /* Functions created during expansion shall be compiled
340 directly. */
341 node->process = 0;
342 cgraph_call_function_insertion_hooks (node);
343 expand_function (node);
344 break;
345
346 default:
347 gcc_unreachable ();
348 break;
349 }
350 }
351 free_cgraph_node_set (cgraph_new_nodes);
352 cgraph_new_nodes = NULL;
353 return output;
354}
355
 356/* As a GCC extension we allow redefinition of the function. The
 357 semantics when the two bodies differ are not well defined.
 358 We replace the old body with the new body, so in unit-at-a-time mode
 359 we always use the new body, while in normal mode we may end up with
 360 the old body inlined into some functions and the new body expanded and
 361 inlined in others.
 362
 363 ??? It may make more sense to use one body for inlining and the other
 364 body for expanding the function but this is difficult to do. */
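/* For example (illustrative only):

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   Here the second definition replaces the first, so in unit-at-a-time
   mode every call to f ends up using the body returning 2.  */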
365
366void
367cgraph_reset_node (struct cgraph_node *node)
368{
369 /* If node->process is set, then we have already begun whole-unit analysis.
370 This is *not* testing for whether we've already emitted the function.
371 That case can be sort-of legitimately seen with real function redefinition
372 errors. I would argue that the front end should never present us with
373 such a case, but don't enforce that for now. */
374 gcc_assert (!node->process);
375
376 /* Reset our data structures so we can analyze the function again. */
377 memset (&node->local, 0, sizeof (node->local));
378 memset (&node->global, 0, sizeof (node->global));
379 memset (&node->rtl, 0, sizeof (node->rtl));
380 node->symbol.analyzed = false;
381 node->symbol.definition = false;
382 node->symbol.alias = false;
383 node->symbol.weakref = false;
384 node->symbol.cpp_implicit_alias = false;
385
386 cgraph_node_remove_callees (node);
387 ipa_remove_all_references (&node->symbol.ref_list);
388}
389
390/* Return true when there are references to NODE. */
391
392static bool
393referred_to_p (symtab_node node)
394{
395 struct ipa_ref *ref;
396
397 /* See if there are any references at all. */
398 if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref))
399 return true;
400 /* For functions check also calls. */
401 cgraph_node *cn = dyn_cast <cgraph_node> (node);
402 if (cn && cn->callers)
403 return true;
404 return false;
405}
406
407/* DECL has been parsed. Take it, queue it, compile it at the whim of the
408 logic in effect. If NESTED is true, then our caller cannot stand to have
409 the garbage collector run at the moment. We would need to either create
410 a new GC context, or just not compile right now. */
411
412void
413cgraph_finalize_function (tree decl, bool nested)
414{
415 struct cgraph_node *node = cgraph_get_create_node (decl);
416
417 if (node->symbol.definition)
418 {
419 cgraph_reset_node (node);
420 node->local.redefined_extern_inline = true;
421 }
422
423 notice_global_symbol (decl);
424 node->symbol.definition = true;
425 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
426
427 /* With -fkeep-inline-functions we are keeping all inline functions except
428 for extern inline ones. */
429 if (flag_keep_inline_functions
430 && DECL_DECLARED_INLINE_P (decl)
431 && !DECL_EXTERNAL (decl)
432 && !DECL_DISREGARD_INLINE_LIMITS (decl))
433 node->symbol.force_output = 1;
434
 435 /* When not optimizing, also output the static functions (see
436 PR24561), but don't do so for always_inline functions, functions
437 declared inline and nested functions. These were optimized out
438 in the original implementation and it is unclear whether we want
439 to change the behavior here. */
440 if ((!optimize
441 && !node->symbol.cpp_implicit_alias
442 && !DECL_DISREGARD_INLINE_LIMITS (decl)
443 && !DECL_DECLARED_INLINE_P (decl)
444 && !(DECL_CONTEXT (decl)
445 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
446 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
447 node->symbol.force_output = 1;
448
449 /* If we've not yet emitted decl, tell the debug info about it. */
450 if (!TREE_ASM_WRITTEN (decl))
451 (*debug_hooks->deferred_inline_function) (decl);
452
453 /* Possibly warn about unused parameters. */
454 if (warn_unused_parameter)
455 do_warn_unused_parameter (decl);
456
457 if (!nested)
458 ggc_collect ();
459
460 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
461 && (decide_is_symbol_needed ((symtab_node) node)
462 || referred_to_p ((symtab_node)node)))
463 enqueue_node ((symtab_node)node);
464}
465
466/* Add the function FNDECL to the call graph.
467 Unlike cgraph_finalize_function, this function is intended to be used
 468 by the middle end and allows insertion of a new function at an arbitrary point
469 of compilation. The function can be either in high, low or SSA form
470 GIMPLE.
471
472 The function is assumed to be reachable and have address taken (so no
473 API breaking optimizations are performed on it).
474
 475 The main work done by this function is to enqueue the function for later
 476 processing, to avoid the need for passes to be re-entrant. */
477
478void
479cgraph_add_new_function (tree fndecl, bool lowered)
480{
481 struct cgraph_node *node;
482 switch (cgraph_state)
483 {
484 case CGRAPH_STATE_PARSING:
485 cgraph_finalize_function (fndecl, false);
486 break;
487 case CGRAPH_STATE_CONSTRUCTION:
488 /* Just enqueue function to be processed at nearest occurrence. */
489 node = cgraph_create_node (fndecl);
490 if (lowered)
491 node->lowered = true;
492 if (!cgraph_new_nodes)
493 cgraph_new_nodes = cgraph_node_set_new ();
494 cgraph_node_set_add (cgraph_new_nodes, node);
495 break;
496
497 case CGRAPH_STATE_IPA:
498 case CGRAPH_STATE_IPA_SSA:
499 case CGRAPH_STATE_EXPANSION:
500 /* Bring the function into finalized state and enqueue for later
501 analyzing and compilation. */
502 node = cgraph_get_create_node (fndecl);
503 node->local.local = false;
504 node->symbol.definition = true;
505 node->symbol.force_output = true;
506 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
507 {
508 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
509 gimple_register_cfg_hooks ();
510 bitmap_obstack_initialize (NULL);
511 execute_pass_list (all_lowering_passes);
512 execute_pass_list (pass_early_local_passes.pass.sub);
513 bitmap_obstack_release (NULL);
514 pop_cfun ();
515
516 lowered = true;
517 }
518 if (lowered)
519 node->lowered = true;
520 if (!cgraph_new_nodes)
521 cgraph_new_nodes = cgraph_node_set_new ();
522 cgraph_node_set_add (cgraph_new_nodes, node);
523 break;
524
525 case CGRAPH_STATE_FINISHED:
526 /* At the very end of compilation we have to do all the work up
527 to expansion. */
528 node = cgraph_create_node (fndecl);
529 if (lowered)
530 node->lowered = true;
531 node->symbol.definition = true;
532 analyze_function (node);
533 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
534 gimple_register_cfg_hooks ();
535 bitmap_obstack_initialize (NULL);
536 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
537 execute_pass_list (pass_early_local_passes.pass.sub);
538 bitmap_obstack_release (NULL);
539 pop_cfun ();
540 expand_function (node);
541 break;
542
543 default:
544 gcc_unreachable ();
545 }
546
547 /* Set a personality if required and we already passed EH lowering. */
548 if (lowered
549 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
550 == eh_personality_lang))
551 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
552}
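/* Illustrative usage sketch only: a middle-end pass that has just built
   NEW_FNDECL (a hypothetical FUNCTION_DECL) in lowered GIMPLE, for
   example while outlining a region, would register it as

     cgraph_add_new_function (new_fndecl, true);

   and rely on cgraph_process_new_functions to analyze and expand it at
   the appropriate point.  */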
553
554/* Add a top-level asm statement to the list. */
555
556struct asm_node *
557add_asm_node (tree asm_str)
558{
559 struct asm_node *node;
560
561 node = ggc_alloc_cleared_asm_node ();
562 node->asm_str = asm_str;
563 node->order = symtab_order++;
564 node->next = NULL;
565 if (asm_nodes == NULL)
566 asm_nodes = node;
567 else
568 asm_last_node->next = node;
569 asm_last_node = node;
570 return node;
571}
572
573/* Output all asm statements we have stored up to be output. */
574
575static void
576output_asm_statements (void)
577{
578 struct asm_node *can;
579
580 if (seen_error ())
581 return;
582
583 for (can = asm_nodes; can; can = can->next)
584 assemble_asm (can->asm_str);
585 asm_nodes = NULL;
586}
587
588/* Analyze the function scheduled to be output. */
589static void
590analyze_function (struct cgraph_node *node)
591{
592 tree decl = node->symbol.decl;
593 location_t saved_loc = input_location;
594 input_location = DECL_SOURCE_LOCATION (decl);
595
596 if (node->symbol.alias)
597 symtab_resolve_alias
598 ((symtab_node) node, (symtab_node) cgraph_get_node (node->symbol.alias_target));
599 else if (node->thunk.thunk_p)
600 {
601 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
602 NULL, 0, CGRAPH_FREQ_BASE);
603 node->thunk.alias = NULL;
604 }
605 else if (node->dispatcher_function)
606 {
607 /* Generate the dispatcher body of multi-versioned functions. */
608 struct cgraph_function_version_info *dispatcher_version_info
609 = get_cgraph_node_version (node);
610 if (dispatcher_version_info != NULL
611 && (dispatcher_version_info->dispatcher_resolver
612 == NULL_TREE))
613 {
614 tree resolver = NULL_TREE;
615 gcc_assert (targetm.generate_version_dispatcher_body);
616 resolver = targetm.generate_version_dispatcher_body (node);
617 gcc_assert (resolver != NULL_TREE);
618 }
619 }
620 else
621 {
622 push_cfun (DECL_STRUCT_FUNCTION (decl));
623
624 assign_assembler_name_if_neeeded (node->symbol.decl);
625
626 /* Make sure to gimplify bodies only once. During analyzing a
627 function we lower it, which will require gimplified nested
628 functions, so we can end up here with an already gimplified
629 body. */
630 if (!gimple_has_body_p (decl))
631 gimplify_function_tree (decl);
632 dump_function (TDI_generic, decl);
633
634 /* Lower the function. */
635 if (!node->lowered)
636 {
637 if (node->nested)
638 lower_nested_functions (node->symbol.decl);
639 gcc_assert (!node->nested);
640
641 gimple_register_cfg_hooks ();
642 bitmap_obstack_initialize (NULL);
643 execute_pass_list (all_lowering_passes);
644 free_dominance_info (CDI_POST_DOMINATORS);
645 free_dominance_info (CDI_DOMINATORS);
646 compact_blocks ();
647 bitmap_obstack_release (NULL);
648 node->lowered = true;
649 }
650
651 pop_cfun ();
652 }
653 node->symbol.analyzed = true;
654
655 input_location = saved_loc;
656}
657
 658/* The C++ frontend produces same body aliases all over the place, even before PCH
 659 gets streamed out. It relies on us linking the aliases with their function
 660 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
 661 first produce aliases without links, but once the C++ FE is sure it won't stream
 662 PCH we build the links via this function. */
663
664void
665cgraph_process_same_body_aliases (void)
666{
667 symtab_node node;
668 FOR_EACH_SYMBOL (node)
669 if (node->symbol.cpp_implicit_alias && !node->symbol.analyzed)
670 symtab_resolve_alias
671 (node,
672 TREE_CODE (node->symbol.alias_target) == VAR_DECL
673 ? (symtab_node)varpool_node_for_decl (node->symbol.alias_target)
674 : (symtab_node)cgraph_get_create_node (node->symbol.alias_target));
675 cpp_implicit_aliases_done = true;
676}
677
678/* Process attributes common for vars and functions. */
679
680static void
681process_common_attributes (tree decl)
682{
683 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
684
685 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
686 {
687 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
688 "%<weakref%> attribute should be accompanied with"
689 " an %<alias%> attribute");
690 DECL_WEAK (decl) = 0;
691 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
692 DECL_ATTRIBUTES (decl));
693 }
694}
695
696/* Look for externally_visible and used attributes and mark cgraph nodes
697 accordingly.
698
699 We cannot mark the nodes at the point the attributes are processed (in
700 handle_*_attribute) because the copy of the declarations available at that
701 point may not be canonical. For example, in:
702
703 void f();
704 void f() __attribute__((used));
705
706 the declaration we see in handle_used_attribute will be the second
707 declaration -- but the front end will subsequently merge that declaration
708 with the original declaration and discard the second declaration.
709
710 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
711
712 void f() {}
713 void f() __attribute__((externally_visible));
714
715 is valid.
716
717 So, we walk the nodes at the end of the translation unit, applying the
718 attributes at that point. */
719
720static void
721process_function_and_variable_attributes (struct cgraph_node *first,
722 struct varpool_node *first_var)
723{
724 struct cgraph_node *node;
725 struct varpool_node *vnode;
726
727 for (node = cgraph_first_function (); node != first;
728 node = cgraph_next_function (node))
729 {
730 tree decl = node->symbol.decl;
731 if (DECL_PRESERVE_P (decl))
732 cgraph_mark_force_output_node (node);
733 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
734 {
735 if (! TREE_PUBLIC (node->symbol.decl))
736 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
737 "%<externally_visible%>"
738 " attribute have effect only on public objects");
739 }
740 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
741 && (node->symbol.definition && !node->symbol.alias))
742 {
743 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
744 "%<weakref%> attribute ignored"
745 " because function is defined");
746 DECL_WEAK (decl) = 0;
747 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
748 DECL_ATTRIBUTES (decl));
749 }
750
751 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
752 && !DECL_DECLARED_INLINE_P (decl)
753 /* redefining extern inline function makes it DECL_UNINLINABLE. */
754 && !DECL_UNINLINABLE (decl))
755 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
756 "always_inline function might not be inlinable");
757
758 process_common_attributes (decl);
759 }
760 for (vnode = varpool_first_variable (); vnode != first_var;
761 vnode = varpool_next_variable (vnode))
762 {
763 tree decl = vnode->symbol.decl;
764 if (DECL_EXTERNAL (decl)
765 && DECL_INITIAL (decl))
766 varpool_finalize_decl (decl);
767 if (DECL_PRESERVE_P (decl))
768 vnode->symbol.force_output = true;
769 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
770 {
771 if (! TREE_PUBLIC (vnode->symbol.decl))
772 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
773 "%<externally_visible%>"
774 " attribute have effect only on public objects");
775 }
776 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
777 && vnode->symbol.definition
778 && DECL_INITIAL (decl))
779 {
780 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
781 "%<weakref%> attribute ignored"
782 " because variable is initialized");
783 DECL_WEAK (decl) = 0;
784 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
785 DECL_ATTRIBUTES (decl));
786 }
787 process_common_attributes (decl);
788 }
789}
790
 791/* Mark DECL as finalized. By finalizing the declaration, the frontend instructs the
 792 middle end to output the variable to the asm file, if needed or externally
 793 visible. */
794
795void
796varpool_finalize_decl (tree decl)
797{
798 struct varpool_node *node = varpool_node_for_decl (decl);
799
800 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
801
802 if (node->symbol.definition)
803 return;
804 notice_global_symbol (decl);
805 node->symbol.definition = true;
806 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
807 /* Traditionally we do not eliminate static variables when not
 808 optimizing and when not doing toplevel reorder. */
809 || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
810 && !DECL_ARTIFICIAL (node->symbol.decl)))
811 node->symbol.force_output = true;
812
813 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
814 && (decide_is_symbol_needed ((symtab_node) node)
815 || referred_to_p ((symtab_node)node)))
816 enqueue_node ((symtab_node)node);
817 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
818 varpool_analyze_node (node);
819 /* Some frontends produce various interface variables after compilation
820 finished. */
821 if (cgraph_state == CGRAPH_STATE_FINISHED)
822 varpool_assemble_decl (node);
823}
824
825
826/* Discover all functions and variables that are trivially needed, analyze
 827 them as well as all functions and variables referred to by them. */
828
829static void
830analyze_functions (void)
831{
832 /* Keep track of already processed nodes when called multiple times for
833 intermodule optimization. */
834 static struct cgraph_node *first_analyzed;
835 struct cgraph_node *first_handled = first_analyzed;
836 static struct varpool_node *first_analyzed_var;
837 struct varpool_node *first_handled_var = first_analyzed_var;
838
839 symtab_node node, next;
840 int i;
841 struct ipa_ref *ref;
842 bool changed = true;
843
844 bitmap_obstack_initialize (NULL);
845 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
846
 847 /* Ugly, but the fixup cannot happen at the time the same body alias is created;
848 C++ FE is confused about the COMDAT groups being right. */
849 if (cpp_implicit_aliases_done)
850 FOR_EACH_SYMBOL (node)
851 if (node->symbol.cpp_implicit_alias)
852 fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
853
 854 /* Analysis adds static variables that in turn add references to new functions.
 855 So we need to iterate the process until it stabilizes. */
856 while (changed)
857 {
858 changed = false;
859 process_function_and_variable_attributes (first_analyzed,
860 first_analyzed_var);
861
862 /* First identify the trivially needed symbols. */
863 for (node = symtab_nodes;
864 node != (symtab_node)first_analyzed
865 && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
866 {
867 if (decide_is_symbol_needed (node))
868 {
869 enqueue_node (node);
870 if (!changed && cgraph_dump_file)
871 fprintf (cgraph_dump_file, "Trivially needed symbols:");
872 changed = true;
873 if (cgraph_dump_file)
874 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
875 }
876 if (node == (symtab_node)first_analyzed
877 || node == (symtab_node)first_analyzed_var)
878 break;
879 }
880 cgraph_process_new_functions ();
881 first_analyzed_var = varpool_first_variable ();
882 first_analyzed = cgraph_first_function ();
883
 884 if (changed && cgraph_dump_file)
885 fprintf (cgraph_dump_file, "\n");
886
887 /* Lower representation, build callgraph edges and references for all trivially
888 needed symbols and all symbols referred by them. */
889 while (first != (symtab_node)(void *)1)
890 {
891 changed = true;
892 node = first;
893 first = (symtab_node)first->symbol.aux;
894 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
895 if (cnode && cnode->symbol.definition)
896 {
897 struct cgraph_edge *edge;
898 tree decl = cnode->symbol.decl;
899
900 /* ??? It is possible to create extern inline function
 901 and later use the weak alias attribute to kill its body.
902 See gcc.c-torture/compile/20011119-1.c */
903 if (!DECL_STRUCT_FUNCTION (decl)
904 && !cnode->symbol.alias
905 && !cnode->thunk.thunk_p
906 && !cnode->dispatcher_function)
907 {
908 cgraph_reset_node (cnode);
909 cnode->local.redefined_extern_inline = true;
910 continue;
911 }
912
913 if (!cnode->symbol.analyzed)
914 analyze_function (cnode);
915
916 for (edge = cnode->callees; edge; edge = edge->next_callee)
917 if (edge->callee->symbol.definition)
918 enqueue_node ((symtab_node)edge->callee);
919
920 /* If decl is a clone of an abstract function,
921 mark that abstract function so that we don't release its body.
922 The DECL_INITIAL() of that abstract function declaration
923 will be later needed to output debug info. */
924 if (DECL_ABSTRACT_ORIGIN (decl))
925 {
926 struct cgraph_node *origin_node
927 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
928 origin_node->abstract_and_needed = true;
929 }
930 }
931 else
932 {
933 varpool_node *vnode = dyn_cast <varpool_node> (node);
934 if (vnode && vnode->symbol.definition && !vnode->symbol.analyzed)
935 varpool_analyze_node (vnode);
936 }
937
938 if (node->symbol.same_comdat_group)
939 {
940 symtab_node next;
941 for (next = node->symbol.same_comdat_group;
942 next != node;
943 next = next->symbol.same_comdat_group)
944 enqueue_node (next);
945 }
946 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
947 if (ref->referred->symbol.definition)
948 enqueue_node (ref->referred);
949 cgraph_process_new_functions ();
950 }
951 }
952
953 /* Collect entry points to the unit. */
954 if (cgraph_dump_file)
955 {
956 fprintf (cgraph_dump_file, "\n\nInitial ");
957 dump_symtab (cgraph_dump_file);
958 }
959
960 if (cgraph_dump_file)
961 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
962
963 for (node = symtab_nodes;
964 node != (symtab_node)first_handled
965 && node != (symtab_node)first_handled_var; node = next)
966 {
967 next = node->symbol.next;
968 if (!node->symbol.aux && !referred_to_p (node))
969 {
970 if (cgraph_dump_file)
971 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
972 symtab_remove_node (node);
973 continue;
974 }
975 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
976 {
977 tree decl = node->symbol.decl;
978
979 if (cnode->symbol.definition && !gimple_has_body_p (decl)
980 && !cnode->symbol.alias
981 && !cnode->thunk.thunk_p)
982 cgraph_reset_node (cnode);
983
984 gcc_assert (!cnode->symbol.definition || cnode->thunk.thunk_p
985 || cnode->symbol.alias
986 || gimple_has_body_p (decl));
987 gcc_assert (cnode->symbol.analyzed == cnode->symbol.definition);
988 }
989 node->symbol.aux = NULL;
990 }
991 first_analyzed = cgraph_first_function ();
992 first_analyzed_var = varpool_first_variable ();
993 if (cgraph_dump_file)
994 {
995 fprintf (cgraph_dump_file, "\n\nReclaimed ");
996 dump_symtab (cgraph_dump_file);
997 }
998 bitmap_obstack_release (NULL);
999 ggc_collect ();
1000}
1001
1002/* Translate the ugly representation of aliases as alias pairs into nice
1003 representation in callgraph. We don't handle all cases yet,
 1004 unfortunately. */
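/* For example (illustrative only), for

     void real_fn (void) { }
     void alias_fn (void) __attribute__ ((alias ("real_fn")));

   the front end records the pair (alias_fn, "real_fn") in ALIAS_PAIRS
   and this function turns it into a proper callgraph alias.  */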
1005
1006static void
1007handle_alias_pairs (void)
1008{
1009 alias_pair *p;
1010 unsigned i;
1011
1012 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1013 {
1014 symtab_node target_node = symtab_node_for_asm (p->target);
1015
1016 /* Weakrefs with target not defined in current unit are easy to handle; they
1017 behave just as external variables except we need to note the alias flag
1018 to later output the weakref pseudo op into asm file. */
1019 if (!target_node && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1020 {
1021 symtab_node node = symtab_get_node (p->decl);
1022 if (node)
1023 {
1024 node->symbol.alias_target = p->target;
1025 node->symbol.weakref = true;
1026 node->symbol.alias = true;
1027 }
1028 alias_pairs->unordered_remove (i);
1029 continue;
1030 }
1031 else if (!target_node)
1032 {
1033 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1034 alias_pairs->unordered_remove (i);
1035 continue;
1036 }
1037
1038 if (DECL_EXTERNAL (target_node->symbol.decl)
1039 /* We use local aliases for C++ thunks to force the tailcall
1040 to bind locally. This is a hack - to keep it working do
1041 the following (which is not strictly correct). */
 1042 && (TREE_CODE (target_node->symbol.decl) != FUNCTION_DECL
1043 || ! DECL_VIRTUAL_P (target_node->symbol.decl))
1044 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1045 {
1046 error ("%q+D aliased to external symbol %qE",
1047 p->decl, p->target);
1048 }
1049
1050 if (TREE_CODE (p->decl) == FUNCTION_DECL
1051 && target_node && is_a <cgraph_node> (target_node))
1052 {
1053 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1054 if (src_node && src_node->symbol.definition)
1055 cgraph_reset_node (src_node);
1056 cgraph_create_function_alias (p->decl, target_node->symbol.decl);
1057 alias_pairs->unordered_remove (i);
1058 }
1059 else if (TREE_CODE (p->decl) == VAR_DECL
1060 && target_node && is_a <varpool_node> (target_node))
1061 {
1062 varpool_create_variable_alias (p->decl, target_node->symbol.decl);
1063 alias_pairs->unordered_remove (i);
1064 }
1065 else
1066 {
1067 error ("%q+D alias in between function and variable is not supported",
1068 p->decl);
1069 warning (0, "%q+D aliased declaration",
1070 target_node->symbol.decl);
1071 alias_pairs->unordered_remove (i);
1072 }
1073 }
1074 vec_free (alias_pairs);
1075}
1076
1077
1078/* Figure out what functions we want to assemble. */
1079
1080static void
1081mark_functions_to_output (void)
1082{
1083 struct cgraph_node *node;
1084#ifdef ENABLE_CHECKING
1085 bool check_same_comdat_groups = false;
1086
1087 FOR_EACH_FUNCTION (node)
1088 gcc_assert (!node->process);
1089#endif
1090
1091 FOR_EACH_FUNCTION (node)
1092 {
1093 tree decl = node->symbol.decl;
1094
1095 gcc_assert (!node->process || node->symbol.same_comdat_group);
1096 if (node->process)
1097 continue;
1098
1099 /* We need to output all local functions that are used and not
1100 always inlined, as well as those that are reachable from
1101 outside the current compilation unit. */
1102 if (node->symbol.analyzed
1103 && !node->thunk.thunk_p
1104 && !node->symbol.alias
1105 && !node->global.inlined_to
1106 && !TREE_ASM_WRITTEN (decl)
1107 && !DECL_EXTERNAL (decl))
1108 {
1109 node->process = 1;
1110 if (node->symbol.same_comdat_group)
1111 {
1112 struct cgraph_node *next;
1113 for (next = cgraph (node->symbol.same_comdat_group);
1114 next != node;
1115 next = cgraph (next->symbol.same_comdat_group))
1116 if (!next->thunk.thunk_p && !next->symbol.alias)
1117 next->process = 1;
1118 }
1119 }
1120 else if (node->symbol.same_comdat_group)
1121 {
1122#ifdef ENABLE_CHECKING
1123 check_same_comdat_groups = true;
1124#endif
1125 }
1126 else
1127 {
1128 /* We should've reclaimed all functions that are not needed. */
1129#ifdef ENABLE_CHECKING
1130 if (!node->global.inlined_to
1131 && gimple_has_body_p (decl)
 1132 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
 1133 are inside a partition, we can end up not removing the body since we no longer
 1134 have an analyzed node pointing to it. */
1135 && !node->symbol.in_other_partition
1136 && !node->symbol.alias
1137 && !node->clones
1138 && !DECL_EXTERNAL (decl))
1139 {
1140 dump_cgraph_node (stderr, node);
1141 internal_error ("failed to reclaim unneeded function");
1142 }
1143#endif
1144 gcc_assert (node->global.inlined_to
1145 || !gimple_has_body_p (decl)
1146 || node->symbol.in_other_partition
1147 || node->clones
1148 || DECL_ARTIFICIAL (decl)
1149 || DECL_EXTERNAL (decl));
1150
1151 }
1152
1153 }
1154#ifdef ENABLE_CHECKING
1155 if (check_same_comdat_groups)
1156 FOR_EACH_FUNCTION (node)
1157 if (node->symbol.same_comdat_group && !node->process)
1158 {
1159 tree decl = node->symbol.decl;
1160 if (!node->global.inlined_to
1161 && gimple_has_body_p (decl)
1162 /* FIXME: in an ltrans unit when the offline copy is outside a
1163 partition but inline copies are inside a partition, we can
1164 end up not removing the body since we no longer have an
1165 analyzed node pointing to it. */
1166 && !node->symbol.in_other_partition
1167 && !node->clones
1168 && !DECL_EXTERNAL (decl))
1169 {
1170 dump_cgraph_node (stderr, node);
1171 internal_error ("failed to reclaim unneeded function in same "
1172 "comdat group");
1173 }
1174 }
1175#endif
1176}
1177
 1178/* DECL is a FUNCTION_DECL. Initialize datastructures so DECL is a function
 1179 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
 1180
 1181 Set current_function_decl and cfun to the newly constructed empty function body.
 1182 Return the basic block in the function body. */
1183
1184basic_block
1185init_lowered_empty_function (tree decl, bool in_ssa)
1186{
1187 basic_block bb;
1188
1189 current_function_decl = decl;
1190 allocate_struct_function (decl, false);
1191 gimple_register_cfg_hooks ();
1192 init_empty_tree_cfg ();
1193
1194 if (in_ssa)
1195 {
1196 init_tree_ssa (cfun);
1197 init_ssa_operands (cfun);
1198 cfun->gimple_df->in_ssa_p = true;
1199 cfun->curr_properties |= PROP_ssa;
1200 }
1201
1202 DECL_INITIAL (decl) = make_node (BLOCK);
1203
1204 DECL_SAVED_TREE (decl) = error_mark_node;
1205 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1206 | PROP_cfg | PROP_loops);
1207
1208 set_loops_for_fn (cfun, ggc_alloc_cleared_loops ());
1209 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1210 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1211
1212 /* Create BB for body of the function and connect it properly. */
1213 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1214 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1215 make_edge (bb, EXIT_BLOCK_PTR, 0);
1216 add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);
1217
1218 return bb;
1219}
1220
1221/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1222 offset indicated by VIRTUAL_OFFSET, if that is
1223 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1224 zero for a result adjusting thunk. */
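/* For the this-adjusting case the GIMPLE built below corresponds
   roughly to the following C-like pseudocode (illustrative only;
   offsets are in bytes, and the vptr is assumed to sit at offset 0):

     ptr += fixed_offset;
     if (virtual_offset)
       ptr += *(ptrdiff_t *) (*(char **) ptr + virtual_offset);
*/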
1225
1226static tree
1227thunk_adjust (gimple_stmt_iterator * bsi,
1228 tree ptr, bool this_adjusting,
1229 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1230{
1231 gimple stmt;
1232 tree ret;
1233
1234 if (this_adjusting
1235 && fixed_offset != 0)
1236 {
1237 stmt = gimple_build_assign
1238 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1239 ptr,
1240 fixed_offset));
1241 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1242 }
1243
1244 /* If there's a virtual offset, look up that value in the vtable and
1245 adjust the pointer again. */
1246 if (virtual_offset)
1247 {
1248 tree vtabletmp;
1249 tree vtabletmp2;
1250 tree vtabletmp3;
1251
1252 if (!vtable_entry_type)
1253 {
1254 tree vfunc_type = make_node (FUNCTION_TYPE);
1255 TREE_TYPE (vfunc_type) = integer_type_node;
1256 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1257 layout_type (vfunc_type);
1258
1259 vtable_entry_type = build_pointer_type (vfunc_type);
1260 }
1261
1262 vtabletmp =
1263 create_tmp_reg (build_pointer_type
1264 (build_pointer_type (vtable_entry_type)), "vptr");
1265
1266 /* The vptr is always at offset zero in the object. */
1267 stmt = gimple_build_assign (vtabletmp,
1268 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1269 ptr));
1270 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1271
1272 /* Form the vtable address. */
1273 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1274 "vtableaddr");
1275 stmt = gimple_build_assign (vtabletmp2,
1276 build_simple_mem_ref (vtabletmp));
1277 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1278
1279 /* Find the entry with the vcall offset. */
1280 stmt = gimple_build_assign (vtabletmp2,
1281 fold_build_pointer_plus_loc (input_location,
1282 vtabletmp2,
1283 virtual_offset));
1284 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1285
1286 /* Get the offset itself. */
1287 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1288 "vcalloffset");
1289 stmt = gimple_build_assign (vtabletmp3,
1290 build_simple_mem_ref (vtabletmp2));
1291 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1292
1293 /* Adjust the `this' pointer. */
1294 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1295 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1296 GSI_CONTINUE_LINKING);
1297 }
1298
1299 if (!this_adjusting
1300 && fixed_offset != 0)
1301 /* Adjust the pointer by the constant. */
1302 {
1303 tree ptrtmp;
1304
1305 if (TREE_CODE (ptr) == VAR_DECL)
1306 ptrtmp = ptr;
1307 else
1308 {
1309 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1310 stmt = gimple_build_assign (ptrtmp, ptr);
1311 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1312 }
1313 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1314 ptrtmp, fixed_offset);
1315 }
1316
1317 /* Emit the statement and gimplify the adjustment expression. */
1318 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1319 stmt = gimple_build_assign (ret, ptr);
1320 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1321
1322 return ret;
1323}
1324
1325/* Produce assembler for thunk NODE. */
1326
1327static void
1328assemble_thunk (struct cgraph_node *node)
1329{
1330 bool this_adjusting = node->thunk.this_adjusting;
1331 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1332 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1333 tree virtual_offset = NULL;
1334 tree alias = node->callees->callee->symbol.decl;
1335 tree thunk_fndecl = node->symbol.decl;
1336 tree a = DECL_ARGUMENTS (thunk_fndecl);
1337
1338 current_function_decl = thunk_fndecl;
1339
1340 /* Ensure thunks are emitted in their correct sections. */
1341 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1342
1343 if (this_adjusting
1344 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1345 virtual_value, alias))
1346 {
1347 const char *fnname;
1348 tree fn_block;
1349 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1350
1351 DECL_RESULT (thunk_fndecl)
1352 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1353 RESULT_DECL, 0, restype);
1354 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1355
1356 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1357 create one. */
1358 fn_block = make_node (BLOCK);
1359 BLOCK_VARS (fn_block) = a;
1360 DECL_INITIAL (thunk_fndecl) = fn_block;
1361 init_function_start (thunk_fndecl);
1362 cfun->is_thunk = 1;
1363 insn_locations_init ();
1364 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1365 prologue_location = curr_insn_location ();
1366 assemble_start_function (thunk_fndecl, fnname);
1367
1368 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1369 fixed_offset, virtual_value, alias);
1370
1371 assemble_end_function (thunk_fndecl, fnname);
1372 insn_locations_finalize ();
1373 init_insn_lengths ();
1374 free_after_compilation (cfun);
1375 set_cfun (NULL);
1376 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1377 node->thunk.thunk_p = false;
1378 node->symbol.analyzed = false;
1379 }
1380 else
1381 {
1382 tree restype;
1383 basic_block bb, then_bb, else_bb, return_bb;
1384 gimple_stmt_iterator bsi;
1385 int nargs = 0;
1386 tree arg;
1387 int i;
1388 tree resdecl;
1389 tree restmp = NULL;
1390 vec<tree> vargs;
1391
1392 gimple call;
1393 gimple ret;
1394
1395 DECL_IGNORED_P (thunk_fndecl) = 1;
1396 bitmap_obstack_initialize (NULL);
1397
1398 if (node->thunk.virtual_offset_p)
1399 virtual_offset = size_int (virtual_value);
1400
1401 /* Build the return declaration for the function. */
1402 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1403 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1404 {
1405 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1406 DECL_ARTIFICIAL (resdecl) = 1;
1407 DECL_IGNORED_P (resdecl) = 1;
1408 DECL_RESULT (thunk_fndecl) = resdecl;
1409 }
1410 else
1411 resdecl = DECL_RESULT (thunk_fndecl);
1412
1413 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1414
1415 bsi = gsi_start_bb (bb);
1416
1417 /* Build call to the function being thunked. */
1418 if (!VOID_TYPE_P (restype))
1419 {
1420 if (!is_gimple_reg_type (restype))
1421 {
1422 restmp = resdecl;
1423 add_local_decl (cfun, restmp);
1424 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1425 }
1426 else
1427 restmp = create_tmp_reg (restype, "retval");
1428 }
1429
1430 for (arg = a; arg; arg = DECL_CHAIN (arg))
1431 nargs++;
1432 vargs.create (nargs);
1433 if (this_adjusting)
1434 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1435 virtual_offset));
1436 else
1437 vargs.quick_push (a);
1438 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1439 vargs.quick_push (arg);
1440 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1441 vargs.release ();
1442 gimple_call_set_from_thunk (call, true);
1443 if (restmp)
1444 gimple_call_set_lhs (call, restmp);
1445 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1446
1447 if (restmp && !this_adjusting)
1448 {
1449 tree true_label = NULL_TREE;
1450
1451 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1452 {
1453 gimple stmt;
1454 /* If the return type is a pointer, we need to
1455 protect against NULL. We know there will be an
1456 adjustment, because that's why we're emitting a
1457 thunk. */
1458 then_bb = create_basic_block (NULL, (void *) 0, bb);
1459 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1460 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1461 add_bb_to_loop (then_bb, bb->loop_father);
1462 add_bb_to_loop (return_bb, bb->loop_father);
1463 add_bb_to_loop (else_bb, bb->loop_father);
1464 remove_edge (single_succ_edge (bb));
1465 true_label = gimple_block_label (then_bb);
1466 stmt = gimple_build_cond (NE_EXPR, restmp,
1467 build_zero_cst (TREE_TYPE (restmp)),
1468 NULL_TREE, NULL_TREE);
1469 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1470 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1471 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1472 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1473 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1474 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1475 bsi = gsi_last_bb (then_bb);
1476 }
1477
1478 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1479 fixed_offset, virtual_offset);
1480 if (true_label)
1481 {
1482 gimple stmt;
1483 bsi = gsi_last_bb (else_bb);
1484 stmt = gimple_build_assign (restmp,
1485 build_zero_cst (TREE_TYPE (restmp)));
1486 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1487 bsi = gsi_last_bb (return_bb);
1488 }
1489 }
1490 else
1491 gimple_call_set_tail (call, true);
1492
1493 /* Build return value. */
1494 ret = gimple_build_return (restmp);
1495 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1496
1497 delete_unreachable_blocks ();
1498 update_ssa (TODO_update_ssa);
1499
1500 /* Since we want to emit the thunk, we explicitly mark its name as
1501 referenced. */
1502 node->thunk.thunk_p = false;
1503 cgraph_node_remove_callees (node);
1504 cgraph_add_new_function (thunk_fndecl, true);
1505 bitmap_obstack_release (NULL);
1506 }
1507 current_function_decl = NULL;
1508 set_cfun (NULL);
1509}
1510
1511
1512
1513/* Assemble thunks and aliases associated to NODE. */
1514
1515static void
1516assemble_thunks_and_aliases (struct cgraph_node *node)
1517{
1518 struct cgraph_edge *e;
1519 int i;
1520 struct ipa_ref *ref;
1521
1522 for (e = node->callers; e;)
1523 if (e->caller->thunk.thunk_p)
1524 {
1525 struct cgraph_node *thunk = e->caller;
1526
1527 e = e->next_caller;
1528 assemble_thunks_and_aliases (thunk);
1529 assemble_thunk (thunk);
1530 }
1531 else
1532 e = e->next_caller;
1533 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
1534 i, ref); i++)
1535 if (ref->use == IPA_REF_ALIAS)
1536 {
1537 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1538 bool saved_written = TREE_ASM_WRITTEN (node->symbol.decl);
1539
1540 /* Force assemble_alias to really output the alias this time instead
1541 of buffering it in same alias pairs. */
1542 TREE_ASM_WRITTEN (node->symbol.decl) = 1;
1543 do_assemble_alias (alias->symbol.decl,
1544 DECL_ASSEMBLER_NAME (node->symbol.decl));
1545 assemble_thunks_and_aliases (alias);
1546 TREE_ASM_WRITTEN (node->symbol.decl) = saved_written;
1547 }
1548}
1549
1550/* Expand function specified by NODE. */
1551
1552static void
1553expand_function (struct cgraph_node *node)
1554{
1555 tree decl = node->symbol.decl;
1556 location_t saved_loc;
1557
1558 /* We ought to not compile any inline clones. */
1559 gcc_assert (!node->global.inlined_to);
1560
1561 announce_function (decl);
1562 node->process = 0;
1563 gcc_assert (node->lowered);
1564
1565 /* Generate RTL for the body of DECL. */
1566
1567 timevar_push (TV_REST_OF_COMPILATION);
1568
1569 gcc_assert (cgraph_global_info_ready);
1570
1571 /* Initialize the default bitmap obstack. */
1572 bitmap_obstack_initialize (NULL);
1573
1574 /* Initialize the RTL code for the function. */
1575 current_function_decl = decl;
1576 saved_loc = input_location;
1577 input_location = DECL_SOURCE_LOCATION (decl);
1578 init_function_start (decl);
1579
1580 gimple_register_cfg_hooks ();
1581
1582 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1583
1584 execute_all_ipa_transforms ();
1585
1586 /* Perform all tree transforms and optimizations. */
1587
1588 /* Signal the start of passes. */
1589 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1590
1591 execute_pass_list (all_passes);
1592
1593 /* Signal the end of passes. */
1594 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1595
1596 bitmap_obstack_release (&reg_obstack);
1597
1598 /* Release the default bitmap obstack. */
1599 bitmap_obstack_release (NULL);
1600
1601 /* If requested, warn about function definitions where the function will
1602 return a value (usually of some struct or union type) which itself will
1603 take up a lot of stack space. */
1604 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1605 {
1606 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1607
1608 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1609 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1610 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1611 larger_than_size))
1612 {
1613 unsigned int size_as_int
1614 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1615
1616 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1617 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1618 decl, size_as_int);
1619 else
1620 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1621 decl, larger_than_size);
1622 }
1623 }
1624
1625 gimple_set_body (decl, NULL);
1626 if (DECL_STRUCT_FUNCTION (decl) == 0
1627 && !cgraph_get_node (decl)->origin)
1628 {
1629 /* Stop pointing to the local nodes about to be freed.
1630 But DECL_INITIAL must remain nonzero so we know this
1631 was an actual function definition.
1632 For a nested function, this is done in c_pop_function_context.
1633 If rest_of_compilation set this to 0, leave it 0. */
1634 if (DECL_INITIAL (decl) != 0)
1635 DECL_INITIAL (decl) = error_mark_node;
1636 }
1637
1638 input_location = saved_loc;
1639
1640 ggc_collect ();
1641 timevar_pop (TV_REST_OF_COMPILATION);
1642
1643 /* Make sure that BE didn't give up on compiling. */
1644 gcc_assert (TREE_ASM_WRITTEN (decl));
1645 set_cfun (NULL);
1646 current_function_decl = NULL;
1647
 1648 /* It would make a lot more sense to output thunks before the function body to get more
 1649 forward and fewer backward jumps. This however would need solving the problem
 1650 with comdats. See PR48668. Also aliases must come after the function itself to
 1651 make one-pass assemblers, like the one on AIX, happy. See PR 50689.
 1652 FIXME: Perhaps thunks should be moved before the function IFF they are not in comdat
 1653 groups. */
1654 assemble_thunks_and_aliases (node);
1655 cgraph_release_function_body (node);
1656 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1657 points to the dead function body. */
1658 cgraph_node_remove_callees (node);
1659}
1660
1661
1662/* Expand all functions that must be output.
1663
 1664 Attempt to topologically sort the nodes so a function is output when
 1665 all the functions it calls are already assembled, to allow data to be
1666 propagated across the callgraph. Use a stack to get smaller distance
1667 between a function and its callees (later we may choose to use a more
1668 sophisticated algorithm for function reordering; we will likely want
1669 to use subsections to make the output functions appear in top-down
1670 order). */
1671
1672static void
1673expand_all_functions (void)
1674{
1675 struct cgraph_node *node;
1676 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1677 int order_pos, new_order_pos = 0;
1678 int i;
1679
1680 order_pos = ipa_reverse_postorder (order);
1681 gcc_assert (order_pos == cgraph_n_nodes);
1682
1683 /* Garbage collector may remove inline clones we eliminate during
1684 optimization. So we must be sure to not reference them. */
1685 for (i = 0; i < order_pos; i++)
1686 if (order[i]->process)
1687 order[new_order_pos++] = order[i];
1688
1689 for (i = new_order_pos - 1; i >= 0; i--)
1690 {
1691 node = order[i];
1692 if (node->process)
1693 {
1694 node->process = 0;
1695 expand_function (node);
1696 }
1697 }
1698 cgraph_process_new_functions ();
1699
1700 free (order);
1701
1702}
1703
1704/* This is used to sort the node types by the cgraph order number. */
1705
1706enum cgraph_order_sort_kind
1707{
1708 ORDER_UNDEFINED = 0,
1709 ORDER_FUNCTION,
1710 ORDER_VAR,
1711 ORDER_ASM
1712};
1713
1714struct cgraph_order_sort
1715{
1716 enum cgraph_order_sort_kind kind;
1717 union
1718 {
1719 struct cgraph_node *f;
1720 struct varpool_node *v;
1721 struct asm_node *a;
1722 } u;
1723};
1724
1725/* Output all functions, variables, and asm statements in the order
1726 according to their order fields, which is the order in which they
1727 appeared in the file. This implements -fno-toplevel-reorder. In
1728 this mode we may output functions and variables which don't really
1729 need to be output. */
1730
1731static void
1732output_in_order (void)
1733{
1734 int max;
1735 struct cgraph_order_sort *nodes;
1736 int i;
1737 struct cgraph_node *pf;
1738 struct varpool_node *pv;
1739 struct asm_node *pa;
1740
1741 max = symtab_order;
1742 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1743
1744 FOR_EACH_DEFINED_FUNCTION (pf)
1745 {
1746 if (pf->process && !pf->thunk.thunk_p && !pf->symbol.alias)
1747 {
1748 i = pf->symbol.order;
1749 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1750 nodes[i].kind = ORDER_FUNCTION;
1751 nodes[i].u.f = pf;
1752 }
1753 }
1754
1755 FOR_EACH_DEFINED_VARIABLE (pv)
1756 if (!DECL_EXTERNAL (pv->symbol.decl))
1757 {
1758 i = pv->symbol.order;
1759 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1760 nodes[i].kind = ORDER_VAR;
1761 nodes[i].u.v = pv;
1762 }
1763
1764 for (pa = asm_nodes; pa; pa = pa->next)
1765 {
1766 i = pa->order;
1767 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1768 nodes[i].kind = ORDER_ASM;
1769 nodes[i].u.a = pa;
1770 }
1771
1772  /* Finalize the named section flags of all variables before any of them are assembled.  */
1773
1774 for (i = 0; i < max; ++i)
1775 if (nodes[i].kind == ORDER_VAR)
1776 varpool_finalize_named_section_flags (nodes[i].u.v);
1777
1778 for (i = 0; i < max; ++i)
1779 {
1780 switch (nodes[i].kind)
1781 {
1782 case ORDER_FUNCTION:
1783 nodes[i].u.f->process = 0;
1784 expand_function (nodes[i].u.f);
1785 break;
1786
1787 case ORDER_VAR:
1788 varpool_assemble_decl (nodes[i].u.v);
1789 break;
1790
1791 case ORDER_ASM:
1792 assemble_asm (nodes[i].u.a->asm_str);
1793 break;
1794
1795 case ORDER_UNDEFINED:
1796 break;
1797
1798 default:
1799 gcc_unreachable ();
1800 }
1801 }
1802
1803 asm_nodes = NULL;
1804 free (nodes);
1805}
1806
1807static void
1808ipa_passes (void)
1809{
1810 set_cfun (NULL);
1811 current_function_decl = NULL;
1812 gimple_register_cfg_hooks ();
1813 bitmap_obstack_initialize (NULL);
1814
1815 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1816
1817 if (!in_lto_p)
1818 {
1819 execute_ipa_pass_list (all_small_ipa_passes);
1820 if (seen_error ())
1821 return;
1822 }
1823
1824  /* Removal of unreachable nodes is never run automatically after the early
1825     passes, because the TODO flags are processed before the subpasses, so do
1826     it explicitly here.  It is important to remove the unreachable functions
1827     to save work at the IPA level and to get the LTO symbol tables right.  */
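  /* Illustration only (not part of the original source): a static function
     such as

       static int square (int x) { return x * x; }

     whose calls were all inlined by the early passes and whose address is
     never taken typically becomes unreachable and is removed here, so later
     IPA passes and the LTO symbol table never see it.  */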
1828 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
1829
1830 /* If pass_all_early_optimizations was not scheduled, the state of
1831 the cgraph will not be properly updated. Update it now. */
1832 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1833 cgraph_state = CGRAPH_STATE_IPA_SSA;
1834
1835 if (!in_lto_p)
1836 {
1837 /* Generate coverage variables and constructors. */
1838 coverage_finish ();
1839
1840 /* Process new functions added. */
1841 set_cfun (NULL);
1842 current_function_decl = NULL;
1843 cgraph_process_new_functions ();
1844
1845 execute_ipa_summary_passes
1846 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1847 }
1848
1849 /* Some targets need to handle LTO assembler output specially. */
1850 if (flag_generate_lto)
1851 targetm.asm_out.lto_start ();
1852
1853 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1854
1855 if (!in_lto_p)
1856 ipa_write_summaries ();
1857
1858 if (flag_generate_lto)
1859 targetm.asm_out.lto_end ();
1860
1861 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
1862 execute_ipa_pass_list (all_regular_ipa_passes);
1863 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1864
1865 bitmap_obstack_release (NULL);
1866}
1867
1868
1869/* Return the identifier of the symbol that DECL's "alias" attribute refers to.  */
1870
1871static tree
1872get_alias_symbol (tree decl)
1873{
1874 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1875 return get_identifier (TREE_STRING_POINTER
1876 (TREE_VALUE (TREE_VALUE (alias))));
1877}
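/* Illustration only (not part of the original source; entry/real_entry are
   made-up names): for user code such as

     void real_entry (void) { }
     void entry (void) __attribute__ ((alias ("real_entry")));

   the "alias" attribute carries the string "real_entry", and get_alias_symbol
   returns the corresponding identifier node.  */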
1878
1879
1880/* Weakrefs may be associated with external decls and thus not output
1881   at expansion time.  Emit all the necessary aliases here.  */
1882
1883static void
1884output_weakrefs (void)
1885{
1886 symtab_node node;
1887 FOR_EACH_SYMBOL (node)
1888 if (node->symbol.alias
1889 && !TREE_ASM_WRITTEN (node->symbol.decl)
1890 && node->symbol.weakref)
1891 {
1892 tree target;
1893
1894      /* Weakrefs are special in not requiring a target definition in the
1895         current compilation unit, so it is a bit harder to work out what we
1896         want to alias.
1897         When the alias target is defined, we need to fetch it from its symtab
1898         reference; otherwise it is pointed to by alias_target.  */
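      /* Illustration only (not part of the original source; my_log/ext_log are
         made-up names): for a weakref whose target is not defined in this
         unit, e.g.

           static void my_log (const char *)
             __attribute__ ((weakref ("ext_log")));

         only alias_target ("ext_log") is known, so the first branch below is
         used; had ext_log been defined and analyzed here, the target would be
         taken from the symtab reference instead.  */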
1899 if (node->symbol.alias_target)
1900 target = (DECL_P (node->symbol.alias_target)
1901 ? DECL_ASSEMBLER_NAME (node->symbol.alias_target)
1902 : node->symbol.alias_target);
1903 else if (node->symbol.analyzed)
1904 target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->symbol.decl);
1905 else
1906 {
1907 gcc_unreachable ();
1908 target = get_alias_symbol (node->symbol.decl);
1909 }
1910 do_assemble_alias (node->symbol.decl, target);
1911 }
1912}
1913
1914/* Initialize callgraph dump file. */
1915
1916void
1917init_cgraph (void)
1918{
1919 if (!cgraph_dump_file)
1920 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1921}
1922
1923
1924/* Run the IPA passes and then expand everything that must be output into assembly.  */
1925
1926void
1927compile (void)
1928{
1929 if (seen_error ())
1930 return;
1931
1932#ifdef ENABLE_CHECKING
1933 verify_symtab ();
1934#endif
1935
1936 timevar_push (TV_CGRAPHOPT);
1937 if (pre_ipa_mem_report)
1938 {
1939 fprintf (stderr, "Memory consumption before IPA\n");
1940 dump_memory_report (false);
1941 }
1942 if (!quiet_flag)
1943 fprintf (stderr, "Performing interprocedural optimizations\n");
1944 cgraph_state = CGRAPH_STATE_IPA;
1945
1946 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1947 if (flag_lto)
1948 lto_streamer_hooks_init ();
1949
1950  /* Don't run the IPA passes if there were any errors or sorry messages.  */
1951 if (!seen_error ())
1952 ipa_passes ();
1953
1954 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
1955 if (seen_error ()
1956 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
1957 {
1958 timevar_pop (TV_CGRAPHOPT);
1959 return;
1960 }
1961
1962  /* This pass removes the bodies of extern inline functions we never inlined.
1963     Do it only now so that the other IPA passes see what is really going on.  */
1964 symtab_remove_unreachable_nodes (false, dump_file);
1965 cgraph_global_info_ready = true;
1966 if (cgraph_dump_file)
1967 {
1968 fprintf (cgraph_dump_file, "Optimized ");
1969 dump_symtab (cgraph_dump_file);
1970 }
1971 if (post_ipa_mem_report)
1972 {
1973 fprintf (stderr, "Memory consumption after IPA\n");
1974 dump_memory_report (false);
1975 }
1976 timevar_pop (TV_CGRAPHOPT);
1977
1978 /* Output everything. */
1979 (*debug_hooks->assembly_start) ();
1980 if (!quiet_flag)
1981 fprintf (stderr, "Assembling functions:\n");
1982#ifdef ENABLE_CHECKING
1983 verify_symtab ();
1984#endif
1985
1986 cgraph_materialize_all_clones ();
1987 bitmap_obstack_initialize (NULL);
1988 execute_ipa_pass_list (all_late_ipa_passes);
1989 symtab_remove_unreachable_nodes (true, dump_file);
1990#ifdef ENABLE_CHECKING
1991 verify_symtab ();
1992#endif
1993 bitmap_obstack_release (NULL);
1994 mark_functions_to_output ();
1995
1996  /* When weakref support is missing, we automatically translate all
1997     references to NODE into references to its ultimate alias target.
1998     The renaming mechanism uses the IDENTIFIER_TRANSPARENT_ALIAS flag and
1999     TREE_CHAIN.
2000
2001     Set up this mapping before we output any assembler, but only once we
2002     are sure that all symbol renaming is done.
2003
2004     FIXME: All this ugliness can go away if we just do the renaming at the
2005     GIMPLE level by physically rewriting the IL.  At the moment we can only
2006     redirect calls, so we need infrastructure for renaming references as well.  */
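  /* Illustration only (not part of the original source; trace/ext_trace are
     made-up names): on a target without ASM_OUTPUT_WEAKREF, a weakref such as

       static void trace (const char *) __attribute__ ((weakref ("ext_trace")));

     is handled by the loop below: the assembler name "trace" is marked as a
     transparent alias whose TREE_CHAIN points at "ext_trace", so every
     emitted reference to trace becomes a direct reference to ext_trace.  */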
2007#ifndef ASM_OUTPUT_WEAKREF
2008 symtab_node node;
2009
2010 FOR_EACH_SYMBOL (node)
2011 if (node->symbol.alias
2012 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
2013 {
2014 IDENTIFIER_TRANSPARENT_ALIAS
2015 (DECL_ASSEMBLER_NAME (node->symbol.decl)) = 1;
2016 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->symbol.decl))
2017 = (node->symbol.alias_target ? node->symbol.alias_target
2018 : DECL_ASSEMBLER_NAME (symtab_alias_target (node)->symbol.decl));
2019 }
2020#endif
2021
2022 cgraph_state = CGRAPH_STATE_EXPANSION;
2023 if (!flag_toplevel_reorder)
2024 output_in_order ();
2025 else
2026 {
2027 output_asm_statements ();
2028
2029 expand_all_functions ();
2030 varpool_output_variables ();
2031 }
2032
2033 cgraph_process_new_functions ();
2034 cgraph_state = CGRAPH_STATE_FINISHED;
2035 output_weakrefs ();
2036
2037 if (cgraph_dump_file)
2038 {
2039 fprintf (cgraph_dump_file, "\nFinal ");
2040 dump_symtab (cgraph_dump_file);
2041 }
2042#ifdef ENABLE_CHECKING
2043 verify_symtab ();
2044 /* Double check that all inline clones are gone and that all
2045 function bodies have been released from memory. */
2046 if (!seen_error ())
2047 {
2048 struct cgraph_node *node;
2049 bool error_found = false;
2050
2051 FOR_EACH_DEFINED_FUNCTION (node)
2052 if (node->global.inlined_to
2053 || gimple_has_body_p (node->symbol.decl))
2054 {
2055 error_found = true;
2056 dump_cgraph_node (stderr, node);
2057 }
2058 if (error_found)
2059 internal_error ("nodes with unreleased memory found");
2060 }
2061#endif
2062}
2063
2064
2065/* Analyze the whole compilation unit once it is parsed completely. */
2066
2067void
2068finalize_compilation_unit (void)
2069{
2070 timevar_push (TV_CGRAPH);
2071
2072 /* If we're here there's no current function anymore. Some frontends
2073 are lazy in clearing these. */
2074 current_function_decl = NULL;
2075 set_cfun (NULL);
2076
2077  /* Do not skip analyzing the functions if there were errors; otherwise
2078     we would miss diagnostics for the following functions.  */
2079
2080 /* Emit size functions we didn't inline. */
2081 finalize_size_functions ();
2082
2083 /* Mark alias targets necessary and emit diagnostics. */
2084 handle_alias_pairs ();
2085
2086 if (!quiet_flag)
2087 {
2088 fprintf (stderr, "\nAnalyzing compilation unit\n");
2089 fflush (stderr);
2090 }
2091
2092 if (flag_dump_passes)
2093 dump_passes ();
2094
2095 /* Gimplify and lower all functions, compute reachability and
2096 remove unreachable nodes. */
2097 analyze_functions ();
2098
2099 /* Mark alias targets necessary and emit diagnostics. */
2100 handle_alias_pairs ();
2101
2102 /* Gimplify and lower thunks. */
2103 analyze_functions ();
2104
2105 /* Finally drive the pass manager. */
2106 compile ();
2107
2108 timevar_pop (TV_CGRAPH);
2109}
2110
2111
2112#include "gt-cgraphunit.h"