gcc/cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main purpose of this file is to act as an interface between
24 the tree-based front ends and the back end.
25
26 The front end is supposed to use the following functionality:
27
28 - cgraph_finalize_function
29
30 This function is called once the front end has parsed the whole body of a function
31 and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception, needed to implement the GCC extern inline
34 extension.)
35
36 - varpool_finalize_decl
37
38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Inserts a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
47 This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back end. Optimizations are performed and
63 the final assembler output is generated. This is done in the following way. Note
64 that with link-time optimization the process is split into three
65 stages (compile time, link-time analysis and parallel link-time compilation, as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done, e.g. to discover
81 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 These are interprocedural passes executed only at compilation
86 time. They include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across the whole program
94 at link time. Their analysis stage is performed early to
95 reduce both linking times and link-time memory usage by
96 not having to represent the whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and/or link-time analysis stage (WPA):
102
103 At link time the units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel link-time stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies, by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decisions of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within a single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that need to be output as
139 assembler code are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out, and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 the back end.
149 - cgraph_add_new_function is used to add backend-produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of a function into a new one (a version)
157 and applies simple transformations.
158 */
159
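/* As a hedged illustration only: a minimal sketch of how a front end is
   expected to drive the interface described above.  The parse_next_function
   and parse_next_variable hooks are hypothetical; real front ends call the
   finalization functions from their own parsing loops.  */
#if 0
static void
frontend_compile_unit (void)
{
  tree decl;

  /* Hand over each fully parsed function body; neither the body nor the
     declaration may change afterwards.  */
  while ((decl = parse_next_function ()) != NULL_TREE)
    cgraph_finalize_function (decl, false);

  /* Likewise for static variables.  */
  while ((decl = parse_next_variable ()) != NULL_TREE)
    varpool_finalize_decl (decl);

  /* Build the symbol table, remove unreachable symbols and, unless only
     streaming LTO, run the optimizers and emit the assembly.  */
  finalize_compilation_unit ();
}
#endif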
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "output.h"
166 #include "rtl.h"
167 #include "tree-flow.h"
168 #include "tree-inline.h"
169 #include "langhooks.h"
170 #include "pointer-set.h"
171 #include "toplev.h"
172 #include "flags.h"
173 #include "ggc.h"
174 #include "debug.h"
175 #include "target.h"
176 #include "cgraph.h"
177 #include "diagnostic.h"
178 #include "params.h"
179 #include "fibheap.h"
180 #include "intl.h"
181 #include "function.h"
182 #include "ipa-prop.h"
183 #include "gimple.h"
184 #include "tree-iterator.h"
185 #include "tree-pass.h"
186 #include "tree-dump.h"
187 #include "gimple-pretty-print.h"
188 #include "output.h"
189 #include "coverage.h"
190 #include "plugin.h"
191 #include "ipa-inline.h"
192 #include "ipa-utils.h"
193 #include "lto-streamer.h"
194 #include "except.h"
195 #include "cfgloop.h"
196 #include "regset.h" /* FIXME: For reg_obstack. */
197 #include "context.h"
198 #include "pass_manager.h"
199
200 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
201 secondary queue used during optimization to accommodate passes that
202 may generate new functions that need to be optimized and expanded. */
203 cgraph_node_set cgraph_new_nodes;
204
205 static void expand_all_functions (void);
206 static void mark_functions_to_output (void);
207 static void expand_function (struct cgraph_node *);
208 static void analyze_function (struct cgraph_node *);
209 static void handle_alias_pairs (void);
210
211 FILE *cgraph_dump_file;
212
213 /* Linked list of cgraph asm nodes. */
214 struct asm_node *asm_nodes;
215
216 /* Last node in cgraph_asm_nodes. */
217 static GTY(()) struct asm_node *asm_last_node;
218
219 /* Used for vtable lookup in thunk adjusting. */
220 static GTY (()) tree vtable_entry_type;
221
222 /* Determine if symbol NODE is needed. That is, visible to something
223 either outside this translation unit or to something magic in the system
224 configury. */
225 bool
226 decide_is_symbol_needed (symtab_node node)
227 {
228 tree decl = node->symbol.decl;
229
230 /* Double check that no one has output the function into the assembly file
231 early. */
232 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
233 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
234
235 if (!node->symbol.definition)
236 return false;
237
238 if (DECL_EXTERNAL (decl))
239 return false;
240
241 /* If the user told us it is used, then it must be so. */
242 if (node->symbol.force_output)
243 return true;
244
245 /* ABI forced symbols are needed when they are external. */
246 if (node->symbol.forced_by_abi && TREE_PUBLIC (decl))
247 return true;
248
249 /* Keep constructors, destructors and virtual functions. */
250 if (TREE_CODE (decl) == FUNCTION_DECL
251 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
252 return true;
253
254 /* Externally visible variables must be output. The exception is
255 COMDAT variables that must be output only when they are needed. */
256 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
257 return true;
258
259 return false;
260 }
261
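/* A rough, user-level illustration (hypothetical code, not part of this
   file) of symbols the predicate above treats as trivially needed: public
   non-COMDAT definitions, static constructors and destructors, and symbols
   forced out by the user, e.g. via the "used" attribute.  */
#if 0
void api_entry (void) { }                      /* TREE_PUBLIC, not COMDAT.  */

__attribute__ ((constructor)) static void
boot (void) { }                                /* Static constructor.  */

static int counter __attribute__ ((used));     /* force_output is set.  */
#endif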
262 /* Head of the queue of nodes to be processed while building callgraph */
263
264 static symtab_node first = (symtab_node)(void *)1;
265
266 /* Add NODE to queue starting at FIRST.
267 The queue is linked via AUX pointers and terminated by pointer to 1. */
268
269 static void
270 enqueue_node (symtab_node node)
271 {
272 if (node->symbol.aux)
273 return;
274 gcc_checking_assert (first);
275 node->symbol.aux = first;
276 first = node;
277 }
278
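/* A self-contained sketch (hypothetical types, illustration only) of the
   work-list idiom used by enqueue_node: nodes are chained through their AUX
   pointer and the list is terminated by the constant 1 rather than NULL, so
   a NULL AUX still means "not queued".  */
#if 0
struct sketch_node { void *aux; /* ... payload ... */ };

static struct sketch_node *sketch_first = (struct sketch_node *) (void *) 1;

static void
sketch_enqueue (struct sketch_node *node)
{
  if (node->aux)
    return;                      /* Already queued.  */
  node->aux = sketch_first;      /* Link in front of the current head.  */
  sketch_first = node;
}

static struct sketch_node *
sketch_pop (void)
{
  struct sketch_node *node = sketch_first;
  if (node == (struct sketch_node *) (void *) 1)
    return NULL;                 /* Sentinel reached; the queue is empty.  */
  sketch_first = (struct sketch_node *) node->aux;
  return node;
}
#endif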
279 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
280 functions into the callgraph so that they look like ordinary reachable
281 functions inserted into the callgraph already at construction time. */
282
283 bool
284 cgraph_process_new_functions (void)
285 {
286 bool output = false;
287 tree fndecl;
288 struct cgraph_node *node;
289 cgraph_node_set_iterator csi;
290
291 if (!cgraph_new_nodes)
292 return false;
293 handle_alias_pairs ();
294 /* Note that this queue may grow as it is being processed, as the new
295 functions may generate new ones. */
296 for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
297 {
298 node = csi_node (csi);
299 fndecl = node->symbol.decl;
300 switch (cgraph_state)
301 {
302 case CGRAPH_STATE_CONSTRUCTION:
303 /* At construction time we just need to finalize function and move
304 it into reachable functions list. */
305
306 cgraph_finalize_function (fndecl, false);
307 output = true;
308 cgraph_call_function_insertion_hooks (node);
309 enqueue_node ((symtab_node) node);
310 break;
311
312 case CGRAPH_STATE_IPA:
313 case CGRAPH_STATE_IPA_SSA:
314 /* When IPA optimization has already started, do all essential
315 transformations that have already been performed on the whole
316 cgraph but not on this function. */
317
318 gimple_register_cfg_hooks ();
319 if (!node->symbol.analyzed)
320 analyze_function (node);
321 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
322 if (cgraph_state == CGRAPH_STATE_IPA_SSA
323 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
324 g->get_passes ()->execute_early_local_passes ();
325 else if (inline_summary_vec != NULL)
326 compute_inline_parameters (node, true);
327 free_dominance_info (CDI_POST_DOMINATORS);
328 free_dominance_info (CDI_DOMINATORS);
329 pop_cfun ();
330 cgraph_call_function_insertion_hooks (node);
331 break;
332
333 case CGRAPH_STATE_EXPANSION:
334 /* Functions created during expansion shall be compiled
335 directly. */
336 node->process = 0;
337 cgraph_call_function_insertion_hooks (node);
338 expand_function (node);
339 break;
340
341 default:
342 gcc_unreachable ();
343 break;
344 }
345 }
346 free_cgraph_node_set (cgraph_new_nodes);
347 cgraph_new_nodes = NULL;
348 return output;
349 }
350
351 /* As a GCC extension we allow redefinition of the function. The
352 semantics when the two bodies differ are not well defined.
353 We replace the old body with the new body, so in unit-at-a-time mode
354 we always use the new body, while in normal mode we may end up with
355 the old body inlined into some functions and the new body expanded and
356 inlined in others.
357
358 ??? It may make more sense to use one body for inlining and the other
359 body for expanding the function, but this is difficult to do. */
360
361 void
362 cgraph_reset_node (struct cgraph_node *node)
363 {
364 /* If node->process is set, then we have already begun whole-unit analysis.
365 This is *not* testing for whether we've already emitted the function.
366 That case can be sort-of legitimately seen with real function redefinition
367 errors. I would argue that the front end should never present us with
368 such a case, but don't enforce that for now. */
369 gcc_assert (!node->process);
370
371 /* Reset our data structures so we can analyze the function again. */
372 memset (&node->local, 0, sizeof (node->local));
373 memset (&node->global, 0, sizeof (node->global));
374 memset (&node->rtl, 0, sizeof (node->rtl));
375 node->symbol.analyzed = false;
376 node->symbol.definition = false;
377 node->symbol.alias = false;
378 node->symbol.weakref = false;
379 node->symbol.cpp_implicit_alias = false;
380
381 cgraph_node_remove_callees (node);
382 ipa_remove_all_references (&node->symbol.ref_list);
383 }
384
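/* A user-level illustration (hypothetical code) of the GNU extern inline
   extension referred to above: with gnu89 semantics the extern inline body
   may later be replaced by an ordinary out-of-line definition, at which
   point cgraph_reset_node discards the analysis of the first body.  */
#if 0
extern inline int
twice (int x)           /* Inline-only body; no symbol is emitted for it.  */
{
  return x + x;
}

int
twice (int x)           /* Redefinition; this body is the one compiled.  */
{
  return 2 * x;
}
#endif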
385 /* Return true when there are references to NODE. */
386
387 static bool
388 referred_to_p (symtab_node node)
389 {
390 struct ipa_ref *ref;
391
392 /* See if there are any references at all. */
393 if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref))
394 return true;
395 /* For functions check also calls. */
396 cgraph_node *cn = dyn_cast <cgraph_node> (node);
397 if (cn && cn->callers)
398 return true;
399 return false;
400 }
401
402 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
403 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
404 the garbage collector run at the moment. We would need to either create
405 a new GC context, or just not compile right now. */
406
407 void
408 cgraph_finalize_function (tree decl, bool no_collect)
409 {
410 struct cgraph_node *node = cgraph_get_create_node (decl);
411
412 if (node->symbol.definition)
413 {
414 /* Nested functions should only be defined once. */
415 gcc_assert (!DECL_CONTEXT (decl)
416 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
417 cgraph_reset_node (node);
418 node->local.redefined_extern_inline = true;
419 }
420
421 notice_global_symbol (decl);
422 node->symbol.definition = true;
423 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
424
425 /* With -fkeep-inline-functions we are keeping all inline functions except
426 for extern inline ones. */
427 if (flag_keep_inline_functions
428 && DECL_DECLARED_INLINE_P (decl)
429 && !DECL_EXTERNAL (decl)
430 && !DECL_DISREGARD_INLINE_LIMITS (decl))
431 node->symbol.force_output = 1;
432
433 /* When not optimizing, also output the static functions. (see
434 PR24561), but don't do so for always_inline functions, functions
435 declared inline and nested functions. These were optimized out
436 in the original implementation and it is unclear whether we want
437 to change the behavior here. */
438 if ((!optimize
439 && !node->symbol.cpp_implicit_alias
440 && !DECL_DISREGARD_INLINE_LIMITS (decl)
441 && !DECL_DECLARED_INLINE_P (decl)
442 && !(DECL_CONTEXT (decl)
443 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
444 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
445 node->symbol.force_output = 1;
446
447 /* If we've not yet emitted decl, tell the debug info about it. */
448 if (!TREE_ASM_WRITTEN (decl))
449 (*debug_hooks->deferred_inline_function) (decl);
450
451 /* Possibly warn about unused parameters. */
452 if (warn_unused_parameter)
453 do_warn_unused_parameter (decl);
454
455 if (!no_collect)
456 ggc_collect ();
457
458 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
459 && (decide_is_symbol_needed ((symtab_node) node)
460 || referred_to_p ((symtab_node)node)))
461 enqueue_node ((symtab_node)node);
462 }
463
464 /* Add the function FNDECL to the call graph.
465 Unlike cgraph_finalize_function, this function is intended to be used
466 by the middle end and allows insertion of a new function at an arbitrary point
467 of compilation. The function can be either in high, low or SSA form
468 GIMPLE.
469
470 The function is assumed to be reachable and have address taken (so no
471 API breaking optimizations are performed on it).
472
473 The main work done by this function is to enqueue the function for later
474 processing, avoiding the need for the passes to be re-entrant. */
475
476 void
477 cgraph_add_new_function (tree fndecl, bool lowered)
478 {
479 gcc::pass_manager *passes = g->get_passes ();
480 struct cgraph_node *node;
481 switch (cgraph_state)
482 {
483 case CGRAPH_STATE_PARSING:
484 cgraph_finalize_function (fndecl, false);
485 break;
486 case CGRAPH_STATE_CONSTRUCTION:
487 /* Just enqueue function to be processed at nearest occurrence. */
488 node = cgraph_create_node (fndecl);
489 if (lowered)
490 node->lowered = true;
491 if (!cgraph_new_nodes)
492 cgraph_new_nodes = cgraph_node_set_new ();
493 cgraph_node_set_add (cgraph_new_nodes, node);
494 break;
495
496 case CGRAPH_STATE_IPA:
497 case CGRAPH_STATE_IPA_SSA:
498 case CGRAPH_STATE_EXPANSION:
499 /* Bring the function into finalized state and enqueue for later
500 analyzing and compilation. */
501 node = cgraph_get_create_node (fndecl);
502 node->local.local = false;
503 node->symbol.definition = true;
504 node->symbol.force_output = true;
505 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
506 {
507 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
508 gimple_register_cfg_hooks ();
509 bitmap_obstack_initialize (NULL);
510 execute_pass_list (passes->all_lowering_passes);
511 passes->execute_early_local_passes ();
512 bitmap_obstack_release (NULL);
513 pop_cfun ();
514
515 lowered = true;
516 }
517 if (lowered)
518 node->lowered = true;
519 if (!cgraph_new_nodes)
520 cgraph_new_nodes = cgraph_node_set_new ();
521 cgraph_node_set_add (cgraph_new_nodes, node);
522 break;
523
524 case CGRAPH_STATE_FINISHED:
525 /* At the very end of compilation we have to do all the work up
526 to expansion. */
527 node = cgraph_create_node (fndecl);
528 if (lowered)
529 node->lowered = true;
530 node->symbol.definition = true;
531 analyze_function (node);
532 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
533 gimple_register_cfg_hooks ();
534 bitmap_obstack_initialize (NULL);
535 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
536 g->get_passes ()->execute_early_local_passes ();
537 bitmap_obstack_release (NULL);
538 pop_cfun ();
539 expand_function (node);
540 break;
541
542 default:
543 gcc_unreachable ();
544 }
545
546 /* Set a personality if required and we already passed EH lowering. */
547 if (lowered
548 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
549 == eh_personality_lang))
550 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
551 }
552
553 /* Add a top-level asm statement to the list. */
554
555 struct asm_node *
556 add_asm_node (tree asm_str)
557 {
558 struct asm_node *node;
559
560 node = ggc_alloc_cleared_asm_node ();
561 node->asm_str = asm_str;
562 node->order = symtab_order++;
563 node->next = NULL;
564 if (asm_nodes == NULL)
565 asm_nodes = node;
566 else
567 asm_last_node->next = node;
568 asm_last_node = node;
569 return node;
570 }
571
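/* A user-level illustration (hypothetical code) of what creates these
   nodes: every file-scope asm statement is recorded by add_asm_node and
   emitted later by output_asm_statements or output_in_order.  */
#if 0
asm (".symver foo_v1, foo@VERS_1");   /* Becomes one asm_node.  */
#endif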
572 /* Output all asm statements we have stored up to be output. */
573
574 static void
575 output_asm_statements (void)
576 {
577 struct asm_node *can;
578
579 if (seen_error ())
580 return;
581
582 for (can = asm_nodes; can; can = can->next)
583 assemble_asm (can->asm_str);
584 asm_nodes = NULL;
585 }
586
587 /* Analyze the function scheduled to be output. */
588 static void
589 analyze_function (struct cgraph_node *node)
590 {
591 tree decl = node->symbol.decl;
592 location_t saved_loc = input_location;
593 input_location = DECL_SOURCE_LOCATION (decl);
594
595 if (node->symbol.alias)
596 symtab_resolve_alias
597 ((symtab_node) node, (symtab_node) cgraph_get_node (node->symbol.alias_target));
598 else if (node->thunk.thunk_p)
599 {
600 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
601 NULL, 0, CGRAPH_FREQ_BASE);
602 node->thunk.alias = NULL;
603 }
604 else if (node->dispatcher_function)
605 {
606 /* Generate the dispatcher body of multi-versioned functions. */
607 struct cgraph_function_version_info *dispatcher_version_info
608 = get_cgraph_node_version (node);
609 if (dispatcher_version_info != NULL
610 && (dispatcher_version_info->dispatcher_resolver
611 == NULL_TREE))
612 {
613 tree resolver = NULL_TREE;
614 gcc_assert (targetm.generate_version_dispatcher_body);
615 resolver = targetm.generate_version_dispatcher_body (node);
616 gcc_assert (resolver != NULL_TREE);
617 }
618 }
619 else
620 {
621 push_cfun (DECL_STRUCT_FUNCTION (decl));
622
623 assign_assembler_name_if_neeeded (node->symbol.decl);
624
625 /* Make sure to gimplify bodies only once. During analyzing a
626 function we lower it, which will require gimplified nested
627 functions, so we can end up here with an already gimplified
628 body. */
629 if (!gimple_has_body_p (decl))
630 gimplify_function_tree (decl);
631 dump_function (TDI_generic, decl);
632
633 /* Lower the function. */
634 if (!node->lowered)
635 {
636 if (node->nested)
637 lower_nested_functions (node->symbol.decl);
638 gcc_assert (!node->nested);
639
640 gimple_register_cfg_hooks ();
641 bitmap_obstack_initialize (NULL);
642 execute_pass_list (g->get_passes ()->all_lowering_passes);
643 free_dominance_info (CDI_POST_DOMINATORS);
644 free_dominance_info (CDI_DOMINATORS);
645 compact_blocks ();
646 bitmap_obstack_release (NULL);
647 node->lowered = true;
648 }
649
650 pop_cfun ();
651 }
652 node->symbol.analyzed = true;
653
654 input_location = saved_loc;
655 }
656
657 /* The C++ front end produces same-body aliases all over the place, even before PCH
658 gets streamed out. It relies on us linking the aliases with their function
659 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
660 first produce aliases without links, but once the C++ FE is sure it won't stream
661 PCH we build the links via this function. */
662
663 void
664 cgraph_process_same_body_aliases (void)
665 {
666 symtab_node node;
667 FOR_EACH_SYMBOL (node)
668 if (node->symbol.cpp_implicit_alias && !node->symbol.analyzed)
669 symtab_resolve_alias
670 (node,
671 TREE_CODE (node->symbol.alias_target) == VAR_DECL
672 ? (symtab_node)varpool_node_for_decl (node->symbol.alias_target)
673 : (symtab_node)cgraph_get_create_node (node->symbol.alias_target));
674 cpp_implicit_aliases_done = true;
675 }
676
677 /* Process attributes common for vars and functions. */
678
679 static void
680 process_common_attributes (tree decl)
681 {
682 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
683
684 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
685 {
686 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
687 "%<weakref%> attribute should be accompanied with"
688 " an %<alias%> attribute");
689 DECL_WEAK (decl) = 0;
690 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
691 DECL_ATTRIBUTES (decl));
692 }
693 }
694
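/* A user-level illustration (hypothetical code) of the check above: a
   weakref must name its target, either through an argument (which the
   front end turns into an accompanying "alias" attribute) or through an
   explicit "alias" attribute; a bare weakref is diagnosed and dropped.  */
#if 0
static void ok1 (void) __attribute__ ((weakref ("real_symbol")));
static void ok2 (void) __attribute__ ((weakref, alias ("real_symbol")));
static void bad (void) __attribute__ ((weakref));  /* Warns; the attribute and
                                                      DECL_WEAK are dropped.  */
#endif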
695 /* Look for externally_visible and used attributes and mark cgraph nodes
696 accordingly.
697
698 We cannot mark the nodes at the point the attributes are processed (in
699 handle_*_attribute) because the copy of the declarations available at that
700 point may not be canonical. For example, in:
701
702 void f();
703 void f() __attribute__((used));
704
705 the declaration we see in handle_used_attribute will be the second
706 declaration -- but the front end will subsequently merge that declaration
707 with the original declaration and discard the second declaration.
708
709 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
710
711 void f() {}
712 void f() __attribute__((externally_visible));
713
714 is valid.
715
716 So, we walk the nodes at the end of the translation unit, applying the
717 attributes at that point. */
718
719 static void
720 process_function_and_variable_attributes (struct cgraph_node *first,
721 struct varpool_node *first_var)
722 {
723 struct cgraph_node *node;
724 struct varpool_node *vnode;
725
726 for (node = cgraph_first_function (); node != first;
727 node = cgraph_next_function (node))
728 {
729 tree decl = node->symbol.decl;
730 if (DECL_PRESERVE_P (decl))
731 cgraph_mark_force_output_node (node);
732 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
733 {
734 if (! TREE_PUBLIC (node->symbol.decl))
735 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
736 "%<externally_visible%>"
737 " attribute have effect only on public objects");
738 }
739 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
740 && (node->symbol.definition && !node->symbol.alias))
741 {
742 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
743 "%<weakref%> attribute ignored"
744 " because function is defined");
745 DECL_WEAK (decl) = 0;
746 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
747 DECL_ATTRIBUTES (decl));
748 }
749
750 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
751 && !DECL_DECLARED_INLINE_P (decl)
752 /* redefining extern inline function makes it DECL_UNINLINABLE. */
753 && !DECL_UNINLINABLE (decl))
754 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
755 "always_inline function might not be inlinable");
756
757 process_common_attributes (decl);
758 }
759 for (vnode = varpool_first_variable (); vnode != first_var;
760 vnode = varpool_next_variable (vnode))
761 {
762 tree decl = vnode->symbol.decl;
763 if (DECL_EXTERNAL (decl)
764 && DECL_INITIAL (decl))
765 varpool_finalize_decl (decl);
766 if (DECL_PRESERVE_P (decl))
767 vnode->symbol.force_output = true;
768 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
769 {
770 if (! TREE_PUBLIC (vnode->symbol.decl))
771 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
772 "%<externally_visible%>"
773 " attribute have effect only on public objects");
774 }
775 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
776 && vnode->symbol.definition
777 && DECL_INITIAL (decl))
778 {
779 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
780 "%<weakref%> attribute ignored"
781 " because variable is initialized");
782 DECL_WEAK (decl) = 0;
783 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
784 DECL_ATTRIBUTES (decl));
785 }
786 process_common_attributes (decl);
787 }
788 }
789
790 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
791 the middle end to output the variable to the asm file, if it is needed or externally
792 visible. */
793
794 void
795 varpool_finalize_decl (tree decl)
796 {
797 struct varpool_node *node = varpool_node_for_decl (decl);
798
799 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
800
801 if (node->symbol.definition)
802 return;
803 notice_global_symbol (decl);
804 node->symbol.definition = true;
805 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
806 /* Traditionally we do not eliminate static variables when not
807 optimizing and when not doing toplevel reorder. */
808 || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
809 && !DECL_ARTIFICIAL (node->symbol.decl)))
810 node->symbol.force_output = true;
811
812 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
813 && (decide_is_symbol_needed ((symtab_node) node)
814 || referred_to_p ((symtab_node)node)))
815 enqueue_node ((symtab_node)node);
816 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
817 varpool_analyze_node (node);
818 /* Some frontends produce various interface variables after compilation
819 has finished. */
820 if (cgraph_state == CGRAPH_STATE_FINISHED)
821 varpool_assemble_decl (node);
822 }
823
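/* A user-level illustration (hypothetical code) of declarations a front end
   finalizes here; the assert above requires them to be TREE_STATIC or
   DECL_EXTERNAL.  */
#if 0
static int file_local_counter;   /* TREE_STATIC; forced out when toplevel
                                    reorder is disabled, otherwise kept only
                                    if something refers to it.  */
int exported_flag = 1;           /* TREE_STATIC and TREE_PUBLIC.  */
#endif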
824
825 /* Discover all functions and variables that are trivially needed, analyze
826 them as well as all functions and variables referred to by them. */
827
828 static void
829 analyze_functions (void)
830 {
831 /* Keep track of already processed nodes when called multiple times for
832 intermodule optimization. */
833 static struct cgraph_node *first_analyzed;
834 struct cgraph_node *first_handled = first_analyzed;
835 static struct varpool_node *first_analyzed_var;
836 struct varpool_node *first_handled_var = first_analyzed_var;
837 struct pointer_set_t *reachable_call_targets = pointer_set_create ();
838
839 symtab_node node, next;
840 int i;
841 struct ipa_ref *ref;
842 bool changed = true;
843
844 bitmap_obstack_initialize (NULL);
845 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
846
847 /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
848 the C++ FE is confused about the COMDAT groups being right. */
849 if (cpp_implicit_aliases_done)
850 FOR_EACH_SYMBOL (node)
851 if (node->symbol.cpp_implicit_alias)
852 fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
853 if (optimize && flag_devirtualize)
854 build_type_inheritance_graph ();
855
856 /* Analysis adds static variables that in turn add references to new functions.
857 So we need to iterate the process until it stabilizes. */
858 while (changed)
859 {
860 changed = false;
861 process_function_and_variable_attributes (first_analyzed,
862 first_analyzed_var);
863
864 /* First identify the trivially needed symbols. */
865 for (node = symtab_nodes;
866 node != (symtab_node)first_analyzed
867 && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
868 {
869 if (decide_is_symbol_needed (node))
870 {
871 enqueue_node (node);
872 if (!changed && cgraph_dump_file)
873 fprintf (cgraph_dump_file, "Trivially needed symbols:");
874 changed = true;
875 if (cgraph_dump_file)
876 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
877 if (!changed && cgraph_dump_file)
878 fprintf (cgraph_dump_file, "\n");
879 }
880 if (node == (symtab_node)first_analyzed
881 || node == (symtab_node)first_analyzed_var)
882 break;
883 }
884 cgraph_process_new_functions ();
885 first_analyzed_var = varpool_first_variable ();
886 first_analyzed = cgraph_first_function ();
887
888 if (changed && cgraph_dump_file)
889 fprintf (cgraph_dump_file, "\n");
890
891 /* Lower representation, build callgraph edges and references for all trivially
892 needed symbols and all symbols referred to by them. */
893 while (first != (symtab_node)(void *)1)
894 {
895 changed = true;
896 node = first;
897 first = (symtab_node)first->symbol.aux;
898 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
899 if (cnode && cnode->symbol.definition)
900 {
901 struct cgraph_edge *edge;
902 tree decl = cnode->symbol.decl;
903
904 /* ??? It is possible to create an extern inline function
905 and later use the weak alias attribute to kill its body.
906 See gcc.c-torture/compile/20011119-1.c */
907 if (!DECL_STRUCT_FUNCTION (decl)
908 && !cnode->symbol.alias
909 && !cnode->thunk.thunk_p
910 && !cnode->dispatcher_function)
911 {
912 cgraph_reset_node (cnode);
913 cnode->local.redefined_extern_inline = true;
914 continue;
915 }
916
917 if (!cnode->symbol.analyzed)
918 analyze_function (cnode);
919
920 for (edge = cnode->callees; edge; edge = edge->next_callee)
921 if (edge->callee->symbol.definition)
922 enqueue_node ((symtab_node)edge->callee);
923 if (optimize && flag_devirtualize)
924 {
925 for (edge = cnode->indirect_calls; edge; edge = edge->next_callee)
926 if (edge->indirect_info->polymorphic)
927 {
928 unsigned int i;
929 void *cache_token;
930 vec <cgraph_node *>targets
931 = possible_polymorphic_call_targets
932 (edge, NULL, &cache_token);
933
934 if (!pointer_set_insert (reachable_call_targets,
935 cache_token))
936 {
937 if (cgraph_dump_file)
938 dump_possible_polymorphic_call_targets
939 (cgraph_dump_file, edge);
940
941 for (i = 0; i < targets.length(); i++)
942 enqueue_node ((symtab_node) targets[i]);
943 }
944 }
945 }
946
947 /* If decl is a clone of an abstract function,
948 mark that abstract function so that we don't release its body.
949 The DECL_INITIAL() of that abstract function declaration
950 will be later needed to output debug info. */
951 if (DECL_ABSTRACT_ORIGIN (decl))
952 {
953 struct cgraph_node *origin_node
954 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
955 origin_node->used_as_abstract_origin = true;
956 }
957 }
958 else
959 {
960 varpool_node *vnode = dyn_cast <varpool_node> (node);
961 if (vnode && vnode->symbol.definition && !vnode->symbol.analyzed)
962 varpool_analyze_node (vnode);
963 }
964
965 if (node->symbol.same_comdat_group)
966 {
967 symtab_node next;
968 for (next = node->symbol.same_comdat_group;
969 next != node;
970 next = next->symbol.same_comdat_group)
971 enqueue_node (next);
972 }
973 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
974 if (ref->referred->symbol.definition)
975 enqueue_node (ref->referred);
976 cgraph_process_new_functions ();
977 }
978 }
979 if (optimize && flag_devirtualize)
980 update_type_inheritance_graph ();
981
982 /* Collect entry points to the unit. */
983 if (cgraph_dump_file)
984 {
985 fprintf (cgraph_dump_file, "\n\nInitial ");
986 dump_symtab (cgraph_dump_file);
987 }
988
989 if (cgraph_dump_file)
990 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
991
992 for (node = symtab_nodes;
993 node != (symtab_node)first_handled
994 && node != (symtab_node)first_handled_var; node = next)
995 {
996 next = node->symbol.next;
997 if (!node->symbol.aux && !referred_to_p (node))
998 {
999 if (cgraph_dump_file)
1000 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
1001 symtab_remove_node (node);
1002 continue;
1003 }
1004 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1005 {
1006 tree decl = node->symbol.decl;
1007
1008 if (cnode->symbol.definition && !gimple_has_body_p (decl)
1009 && !cnode->symbol.alias
1010 && !cnode->thunk.thunk_p)
1011 cgraph_reset_node (cnode);
1012
1013 gcc_assert (!cnode->symbol.definition || cnode->thunk.thunk_p
1014 || cnode->symbol.alias
1015 || gimple_has_body_p (decl));
1016 gcc_assert (cnode->symbol.analyzed == cnode->symbol.definition);
1017 }
1018 node->symbol.aux = NULL;
1019 }
1020 first_analyzed = cgraph_first_function ();
1021 first_analyzed_var = varpool_first_variable ();
1022 if (cgraph_dump_file)
1023 {
1024 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1025 dump_symtab (cgraph_dump_file);
1026 }
1027 bitmap_obstack_release (NULL);
1028 pointer_set_destroy (reachable_call_targets);
1029 ggc_collect ();
1030 }
1031
1032 /* Translate the ugly representation of aliases as alias pairs into nice
1033 representation in callgraph. We don't handle all cases yet,
1034 unfortunately. */
1035
1036 static void
1037 handle_alias_pairs (void)
1038 {
1039 alias_pair *p;
1040 unsigned i;
1041
1042 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1043 {
1044 symtab_node target_node = symtab_node_for_asm (p->target);
1045
1046 /* Weakrefs with target not defined in current unit are easy to handle:
1047 they behave just as external variables except we need to note the
1048 alias flag to later output the weakref pseudo op into asm file. */
1049 if (!target_node
1050 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1051 {
1052 symtab_node node = symtab_get_node (p->decl);
1053 if (node)
1054 {
1055 node->symbol.alias_target = p->target;
1056 node->symbol.weakref = true;
1057 node->symbol.alias = true;
1058 }
1059 alias_pairs->unordered_remove (i);
1060 continue;
1061 }
1062 else if (!target_node)
1063 {
1064 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1065 symtab_node node = symtab_get_node (p->decl);
1066 if (node)
1067 node->symbol.alias = false;
1068 alias_pairs->unordered_remove (i);
1069 continue;
1070 }
1071
1072 if (DECL_EXTERNAL (target_node->symbol.decl)
1073 /* We use local aliases for C++ thunks to force the tailcall
1074 to bind locally. This is a hack - to keep it working do
1075 the following (which is not strictly correct). */
1076 && (TREE_CODE (target_node->symbol.decl) != FUNCTION_DECL
1077 || ! DECL_VIRTUAL_P (target_node->symbol.decl))
1078 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1079 {
1080 error ("%q+D aliased to external symbol %qE",
1081 p->decl, p->target);
1082 }
1083
1084 if (TREE_CODE (p->decl) == FUNCTION_DECL
1085 && target_node && is_a <cgraph_node> (target_node))
1086 {
1087 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1088 if (src_node && src_node->symbol.definition)
1089 cgraph_reset_node (src_node);
1090 cgraph_create_function_alias (p->decl, target_node->symbol.decl);
1091 alias_pairs->unordered_remove (i);
1092 }
1093 else if (TREE_CODE (p->decl) == VAR_DECL
1094 && target_node && is_a <varpool_node> (target_node))
1095 {
1096 varpool_create_variable_alias (p->decl, target_node->symbol.decl);
1097 alias_pairs->unordered_remove (i);
1098 }
1099 else
1100 {
1101 error ("%q+D alias in between function and variable is not supported",
1102 p->decl);
1103 warning (0, "%q+D aliased declaration",
1104 target_node->symbol.decl);
1105 alias_pairs->unordered_remove (i);
1106 }
1107 }
1108 vec_free (alias_pairs);
1109 }
1110
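/* A user-level illustration (hypothetical code) of the alias pairs handled
   above: an ordinary alias to a symbol defined in this unit, and a weakref
   whose target may remain undefined here.  */
#if 0
void target_impl (void) { }

void exported_name (void) __attribute__ ((alias ("target_impl")));

static void helper (void) __attribute__ ((weakref ("maybe_external")));
#endif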
1111
1112 /* Figure out what functions we want to assemble. */
1113
1114 static void
1115 mark_functions_to_output (void)
1116 {
1117 struct cgraph_node *node;
1118 #ifdef ENABLE_CHECKING
1119 bool check_same_comdat_groups = false;
1120
1121 FOR_EACH_FUNCTION (node)
1122 gcc_assert (!node->process);
1123 #endif
1124
1125 FOR_EACH_FUNCTION (node)
1126 {
1127 tree decl = node->symbol.decl;
1128
1129 gcc_assert (!node->process || node->symbol.same_comdat_group);
1130 if (node->process)
1131 continue;
1132
1133 /* We need to output all local functions that are used and not
1134 always inlined, as well as those that are reachable from
1135 outside the current compilation unit. */
1136 if (node->symbol.analyzed
1137 && !node->thunk.thunk_p
1138 && !node->symbol.alias
1139 && !node->global.inlined_to
1140 && !TREE_ASM_WRITTEN (decl)
1141 && !DECL_EXTERNAL (decl))
1142 {
1143 node->process = 1;
1144 if (node->symbol.same_comdat_group)
1145 {
1146 struct cgraph_node *next;
1147 for (next = cgraph (node->symbol.same_comdat_group);
1148 next != node;
1149 next = cgraph (next->symbol.same_comdat_group))
1150 if (!next->thunk.thunk_p && !next->symbol.alias)
1151 next->process = 1;
1152 }
1153 }
1154 else if (node->symbol.same_comdat_group)
1155 {
1156 #ifdef ENABLE_CHECKING
1157 check_same_comdat_groups = true;
1158 #endif
1159 }
1160 else
1161 {
1162 /* We should've reclaimed all functions that are not needed. */
1163 #ifdef ENABLE_CHECKING
1164 if (!node->global.inlined_to
1165 && gimple_has_body_p (decl)
1166 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
1167 are inside the partition, we can end up not removing the body since we no longer
1168 have an analyzed node pointing to it. */
1169 && !node->symbol.in_other_partition
1170 && !node->symbol.alias
1171 && !node->clones
1172 && !DECL_EXTERNAL (decl))
1173 {
1174 dump_cgraph_node (stderr, node);
1175 internal_error ("failed to reclaim unneeded function");
1176 }
1177 #endif
1178 gcc_assert (node->global.inlined_to
1179 || !gimple_has_body_p (decl)
1180 || node->symbol.in_other_partition
1181 || node->clones
1182 || DECL_ARTIFICIAL (decl)
1183 || DECL_EXTERNAL (decl));
1184
1185 }
1186
1187 }
1188 #ifdef ENABLE_CHECKING
1189 if (check_same_comdat_groups)
1190 FOR_EACH_FUNCTION (node)
1191 if (node->symbol.same_comdat_group && !node->process)
1192 {
1193 tree decl = node->symbol.decl;
1194 if (!node->global.inlined_to
1195 && gimple_has_body_p (decl)
1196 /* FIXME: in an ltrans unit when the offline copy is outside a
1197 partition but inline copies are inside a partition, we can
1198 end up not removing the body since we no longer have an
1199 analyzed node pointing to it. */
1200 && !node->symbol.in_other_partition
1201 && !node->clones
1202 && !DECL_EXTERNAL (decl))
1203 {
1204 dump_cgraph_node (stderr, node);
1205 internal_error ("failed to reclaim unneeded function in same "
1206 "comdat group");
1207 }
1208 }
1209 #endif
1210 }
1211
1212 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1213 in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA form.
1214
1215 Set current_function_decl and cfun to the newly constructed empty function body.
1216 Return the basic block in the function body. */
1217
1218 basic_block
1219 init_lowered_empty_function (tree decl, bool in_ssa)
1220 {
1221 basic_block bb;
1222
1223 current_function_decl = decl;
1224 allocate_struct_function (decl, false);
1225 gimple_register_cfg_hooks ();
1226 init_empty_tree_cfg ();
1227
1228 if (in_ssa)
1229 {
1230 init_tree_ssa (cfun);
1231 init_ssa_operands (cfun);
1232 cfun->gimple_df->in_ssa_p = true;
1233 cfun->curr_properties |= PROP_ssa;
1234 }
1235
1236 DECL_INITIAL (decl) = make_node (BLOCK);
1237
1238 DECL_SAVED_TREE (decl) = error_mark_node;
1239 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1240 | PROP_cfg | PROP_loops);
1241
1242 set_loops_for_fn (cfun, ggc_alloc_cleared_loops ());
1243 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1244 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1245
1246 /* Create BB for body of the function and connect it properly. */
1247 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1248 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1249 make_edge (bb, EXIT_BLOCK_PTR, 0);
1250 add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);
1251
1252 return bb;
1253 }
1254
1255 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1256 offset indicated by VIRTUAL_OFFSET, if that is
1257 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1258 zero for a result adjusting thunk. */
1259
1260 static tree
1261 thunk_adjust (gimple_stmt_iterator * bsi,
1262 tree ptr, bool this_adjusting,
1263 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1264 {
1265 gimple stmt;
1266 tree ret;
1267
1268 if (this_adjusting
1269 && fixed_offset != 0)
1270 {
1271 stmt = gimple_build_assign
1272 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1273 ptr,
1274 fixed_offset));
1275 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1276 }
1277
1278 /* If there's a virtual offset, look up that value in the vtable and
1279 adjust the pointer again. */
1280 if (virtual_offset)
1281 {
1282 tree vtabletmp;
1283 tree vtabletmp2;
1284 tree vtabletmp3;
1285
1286 if (!vtable_entry_type)
1287 {
1288 tree vfunc_type = make_node (FUNCTION_TYPE);
1289 TREE_TYPE (vfunc_type) = integer_type_node;
1290 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1291 layout_type (vfunc_type);
1292
1293 vtable_entry_type = build_pointer_type (vfunc_type);
1294 }
1295
1296 vtabletmp =
1297 create_tmp_reg (build_pointer_type
1298 (build_pointer_type (vtable_entry_type)), "vptr");
1299
1300 /* The vptr is always at offset zero in the object. */
1301 stmt = gimple_build_assign (vtabletmp,
1302 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1303 ptr));
1304 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1305
1306 /* Form the vtable address. */
1307 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1308 "vtableaddr");
1309 stmt = gimple_build_assign (vtabletmp2,
1310 build_simple_mem_ref (vtabletmp));
1311 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1312
1313 /* Find the entry with the vcall offset. */
1314 stmt = gimple_build_assign (vtabletmp2,
1315 fold_build_pointer_plus_loc (input_location,
1316 vtabletmp2,
1317 virtual_offset));
1318 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1319
1320 /* Get the offset itself. */
1321 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1322 "vcalloffset");
1323 stmt = gimple_build_assign (vtabletmp3,
1324 build_simple_mem_ref (vtabletmp2));
1325 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1326
1327 /* Adjust the `this' pointer. */
1328 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1329 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1330 GSI_CONTINUE_LINKING);
1331 }
1332
1333 if (!this_adjusting
1334 && fixed_offset != 0)
1335 /* Adjust the pointer by the constant. */
1336 {
1337 tree ptrtmp;
1338
1339 if (TREE_CODE (ptr) == VAR_DECL)
1340 ptrtmp = ptr;
1341 else
1342 {
1343 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1344 stmt = gimple_build_assign (ptrtmp, ptr);
1345 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1346 }
1347 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1348 ptrtmp, fixed_offset);
1349 }
1350
1351 /* Emit the statement and gimplify the adjustment expression. */
1352 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1353 stmt = gimple_build_assign (ret, ptr);
1354 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1355
1356 return ret;
1357 }
1358
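/* The adjustment above written as plain C, as an illustrative sketch only
   (all names are made up): add the fixed offset, then, if a virtual offset
   is given, load the vcall offset from the vtable (the vptr sits at offset
   zero in the object) and add that as well.  */
#if 0
static void *
sketch_thunk_adjust (void *ptr, int this_adjusting,
                     long fixed_offset, int have_virtual_offset,
                     long virtual_offset)
{
  char *p = (char *) ptr;

  if (this_adjusting && fixed_offset != 0)
    p += fixed_offset;                 /* Adjust before the vtable lookup.  */

  if (have_virtual_offset)
    {
      char *vtable = *(char **) p;     /* The vptr is at offset zero.  */
      long vcall_offset = *(long *) (vtable + virtual_offset);
      p += vcall_offset;
    }

  if (!this_adjusting && fixed_offset != 0)
    p += fixed_offset;                 /* Result-adjusting thunks adjust last.  */

  return p;
}
#endif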
1359 /* Produce assembler for thunk NODE. */
1360
1361 void
1362 expand_thunk (struct cgraph_node *node)
1363 {
1364 bool this_adjusting = node->thunk.this_adjusting;
1365 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1366 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1367 tree virtual_offset = NULL;
1368 tree alias = node->callees->callee->symbol.decl;
1369 tree thunk_fndecl = node->symbol.decl;
1370 tree a = DECL_ARGUMENTS (thunk_fndecl);
1371
1372 current_function_decl = thunk_fndecl;
1373
1374 /* Ensure thunks are emitted in their correct sections. */
1375 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1376
1377 if (this_adjusting
1378 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1379 virtual_value, alias))
1380 {
1381 const char *fnname;
1382 tree fn_block;
1383 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1384
1385 DECL_RESULT (thunk_fndecl)
1386 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1387 RESULT_DECL, 0, restype);
1388 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1389
1390 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1391 create one. */
1392 fn_block = make_node (BLOCK);
1393 BLOCK_VARS (fn_block) = a;
1394 DECL_INITIAL (thunk_fndecl) = fn_block;
1395 init_function_start (thunk_fndecl);
1396 cfun->is_thunk = 1;
1397 insn_locations_init ();
1398 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1399 prologue_location = curr_insn_location ();
1400 assemble_start_function (thunk_fndecl, fnname);
1401
1402 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1403 fixed_offset, virtual_value, alias);
1404
1405 assemble_end_function (thunk_fndecl, fnname);
1406 insn_locations_finalize ();
1407 init_insn_lengths ();
1408 free_after_compilation (cfun);
1409 set_cfun (NULL);
1410 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1411 node->thunk.thunk_p = false;
1412 node->symbol.analyzed = false;
1413 }
1414 else
1415 {
1416 tree restype;
1417 basic_block bb, then_bb, else_bb, return_bb;
1418 gimple_stmt_iterator bsi;
1419 int nargs = 0;
1420 tree arg;
1421 int i;
1422 tree resdecl;
1423 tree restmp = NULL;
1424 vec<tree> vargs;
1425
1426 gimple call;
1427 gimple ret;
1428
1429 DECL_IGNORED_P (thunk_fndecl) = 1;
1430 bitmap_obstack_initialize (NULL);
1431
1432 if (node->thunk.virtual_offset_p)
1433 virtual_offset = size_int (virtual_value);
1434
1435 /* Build the return declaration for the function. */
1436 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1437 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1438 {
1439 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1440 DECL_ARTIFICIAL (resdecl) = 1;
1441 DECL_IGNORED_P (resdecl) = 1;
1442 DECL_RESULT (thunk_fndecl) = resdecl;
1443 }
1444 else
1445 resdecl = DECL_RESULT (thunk_fndecl);
1446
1447 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1448
1449 bsi = gsi_start_bb (bb);
1450
1451 /* Build call to the function being thunked. */
1452 if (!VOID_TYPE_P (restype))
1453 {
1454 if (DECL_BY_REFERENCE (resdecl))
1455 restmp = gimple_fold_indirect_ref (resdecl);
1456 else if (!is_gimple_reg_type (restype))
1457 {
1458 restmp = resdecl;
1459 add_local_decl (cfun, restmp);
1460 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1461 }
1462 else
1463 restmp = create_tmp_reg (restype, "retval");
1464 }
1465
1466 for (arg = a; arg; arg = DECL_CHAIN (arg))
1467 nargs++;
1468 vargs.create (nargs);
1469 if (this_adjusting)
1470 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1471 virtual_offset));
1472 else if (nargs)
1473 vargs.quick_push (a);
1474
1475 if (nargs)
1476 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1477 vargs.quick_push (arg);
1478 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1479 vargs.release ();
1480 gimple_call_set_from_thunk (call, true);
1481 if (restmp)
1482 {
1483 gimple_call_set_lhs (call, restmp);
1484 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1485 TREE_TYPE (TREE_TYPE (alias))));
1486 }
1487 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1488 if (!(gimple_call_flags (call) & ECF_NORETURN))
1489 {
1490 if (restmp && !this_adjusting
1491 && (fixed_offset || virtual_offset))
1492 {
1493 tree true_label = NULL_TREE;
1494
1495 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1496 {
1497 gimple stmt;
1498 /* If the return type is a pointer, we need to
1499 protect against NULL. We know there will be an
1500 adjustment, because that's why we're emitting a
1501 thunk. */
1502 then_bb = create_basic_block (NULL, (void *) 0, bb);
1503 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1504 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1505 add_bb_to_loop (then_bb, bb->loop_father);
1506 add_bb_to_loop (return_bb, bb->loop_father);
1507 add_bb_to_loop (else_bb, bb->loop_father);
1508 remove_edge (single_succ_edge (bb));
1509 true_label = gimple_block_label (then_bb);
1510 stmt = gimple_build_cond (NE_EXPR, restmp,
1511 build_zero_cst (TREE_TYPE (restmp)),
1512 NULL_TREE, NULL_TREE);
1513 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1514 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1515 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1516 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1517 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1518 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1519 bsi = gsi_last_bb (then_bb);
1520 }
1521
1522 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1523 fixed_offset, virtual_offset);
1524 if (true_label)
1525 {
1526 gimple stmt;
1527 bsi = gsi_last_bb (else_bb);
1528 stmt = gimple_build_assign (restmp,
1529 build_zero_cst (TREE_TYPE (restmp)));
1530 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1531 bsi = gsi_last_bb (return_bb);
1532 }
1533 }
1534 else
1535 gimple_call_set_tail (call, true);
1536
1537 /* Build return value. */
1538 ret = gimple_build_return (restmp);
1539 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1540 }
1541 else
1542 {
1543 gimple_call_set_tail (call, true);
1544 remove_edge (single_succ_edge (bb));
1545 }
1546
1547 delete_unreachable_blocks ();
1548 update_ssa (TODO_update_ssa);
1549 #ifdef ENABLE_CHECKING
1550 verify_flow_info ();
1551 #endif
1552
1553 /* Since we want to emit the thunk, we explicitly mark its name as
1554 referenced. */
1555 node->thunk.thunk_p = false;
1556 rebuild_cgraph_edges ();
1557 cgraph_add_new_function (thunk_fndecl, true);
1558 bitmap_obstack_release (NULL);
1559 }
1560 current_function_decl = NULL;
1561 set_cfun (NULL);
1562 }
1563
1564 /* Assemble thunks and aliases associated to NODE. */
1565
1566 static void
1567 assemble_thunks_and_aliases (struct cgraph_node *node)
1568 {
1569 struct cgraph_edge *e;
1570 int i;
1571 struct ipa_ref *ref;
1572
1573 for (e = node->callers; e;)
1574 if (e->caller->thunk.thunk_p)
1575 {
1576 struct cgraph_node *thunk = e->caller;
1577
1578 e = e->next_caller;
1579 assemble_thunks_and_aliases (thunk);
1580 expand_thunk (thunk);
1581 }
1582 else
1583 e = e->next_caller;
1584 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
1585 i, ref); i++)
1586 if (ref->use == IPA_REF_ALIAS)
1587 {
1588 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1589 bool saved_written = TREE_ASM_WRITTEN (node->symbol.decl);
1590
1591 /* Force assemble_alias to really output the alias this time instead
1592 of buffering it in same alias pairs. */
1593 TREE_ASM_WRITTEN (node->symbol.decl) = 1;
1594 do_assemble_alias (alias->symbol.decl,
1595 DECL_ASSEMBLER_NAME (node->symbol.decl));
1596 assemble_thunks_and_aliases (alias);
1597 TREE_ASM_WRITTEN (node->symbol.decl) = saved_written;
1598 }
1599 }
1600
1601 /* Expand function specified by NODE. */
1602
1603 static void
1604 expand_function (struct cgraph_node *node)
1605 {
1606 tree decl = node->symbol.decl;
1607 location_t saved_loc;
1608
1609 /* We ought not to compile any inline clones. */
1610 gcc_assert (!node->global.inlined_to);
1611
1612 announce_function (decl);
1613 node->process = 0;
1614 gcc_assert (node->lowered);
1615 cgraph_get_body (node);
1616
1617 /* Generate RTL for the body of DECL. */
1618
1619 timevar_push (TV_REST_OF_COMPILATION);
1620
1621 gcc_assert (cgraph_global_info_ready);
1622
1623 /* Initialize the default bitmap obstack. */
1624 bitmap_obstack_initialize (NULL);
1625
1626 /* Initialize the RTL code for the function. */
1627 current_function_decl = decl;
1628 saved_loc = input_location;
1629 input_location = DECL_SOURCE_LOCATION (decl);
1630 init_function_start (decl);
1631
1632 gimple_register_cfg_hooks ();
1633
1634 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation. */
1635
1636 execute_all_ipa_transforms ();
1637
1638 /* Perform all tree transforms and optimizations. */
1639
1640 /* Signal the start of passes. */
1641 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1642
1643 execute_pass_list (g->get_passes ()->all_passes);
1644
1645 /* Signal the end of passes. */
1646 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1647
1648 bitmap_obstack_release (&reg_obstack);
1649
1650 /* Release the default bitmap obstack. */
1651 bitmap_obstack_release (NULL);
1652
1653 /* If requested, warn about function definitions where the function will
1654 return a value (usually of some struct or union type) which itself will
1655 take up a lot of stack space. */
1656 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1657 {
1658 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1659
1660 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1661 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1662 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1663 larger_than_size))
1664 {
1665 unsigned int size_as_int
1666 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1667
1668 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1669 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1670 decl, size_as_int);
1671 else
1672 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1673 decl, larger_than_size);
1674 }
1675 }
1676
1677 gimple_set_body (decl, NULL);
1678 if (DECL_STRUCT_FUNCTION (decl) == 0
1679 && !cgraph_get_node (decl)->origin)
1680 {
1681 /* Stop pointing to the local nodes about to be freed.
1682 But DECL_INITIAL must remain nonzero so we know this
1683 was an actual function definition.
1684 For a nested function, this is done in c_pop_function_context.
1685 If rest_of_compilation set this to 0, leave it 0. */
1686 if (DECL_INITIAL (decl) != 0)
1687 DECL_INITIAL (decl) = error_mark_node;
1688 }
1689
1690 input_location = saved_loc;
1691
1692 ggc_collect ();
1693 timevar_pop (TV_REST_OF_COMPILATION);
1694
1695 /* Make sure that BE didn't give up on compiling. */
1696 gcc_assert (TREE_ASM_WRITTEN (decl));
1697 set_cfun (NULL);
1698 current_function_decl = NULL;
1699
1700 /* It would make a lot more sense to output thunks before the function body to get more
1701 forward and fewer backward jumps. This however would need solving the problem
1702 with comdats. See PR48668. Also aliases must come after the function itself to
1703 make one-pass assemblers, like the one on AIX, happy. See PR 50689.
1704 FIXME: Perhaps thunks should be moved before the function IFF they are not in comdat
1705 groups. */
1706 assemble_thunks_and_aliases (node);
1707 cgraph_release_function_body (node);
1708 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1709 points to the dead function body. */
1710 cgraph_node_remove_callees (node);
1711 ipa_remove_all_references (&node->symbol.ref_list);
1712 }
1713
1714
1715 /* Expand all functions that must be output.
1716
1717 Attempt to topologically sort the nodes so a function is output when
1718 all called functions are already assembled, to allow data to be
1719 propagated across the callgraph. Use a stack to get smaller distance
1720 between a function and its callees (later we may choose to use a more
1721 sophisticated algorithm for function reordering; we will likely want
1722 to use subsections to make the output functions appear in top-down
1723 order). */
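/* Illustrative sketch only (hypothetical input): given

       static int helper (int x) { return x + 1; }
       int main (void) { return helper (41); }

   the ordering below aims to expand helper before main, so whatever the
   back end records while assembling helper is available when main is
   output.  */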
1724
1725 static void
1726 expand_all_functions (void)
1727 {
1728 struct cgraph_node *node;
1729 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1730 int order_pos, new_order_pos = 0;
1731 int i;
1732
1733 order_pos = ipa_reverse_postorder (order);
1734 gcc_assert (order_pos == cgraph_n_nodes);
1735
1736 /* The garbage collector may remove inline clones we eliminated during
1737 optimization, so we must be sure not to reference them. */
1738 for (i = 0; i < order_pos; i++)
1739 if (order[i]->process)
1740 order[new_order_pos++] = order[i];
1741
1742 for (i = new_order_pos - 1; i >= 0; i--)
1743 {
1744 node = order[i];
1745 if (node->process)
1746 {
1747 node->process = 0;
1748 expand_function (node);
1749 }
1750 }
1751 cgraph_process_new_functions ();
1752
1753 free (order);
1754
1755 }
1756
1757 /* This is used to sort the node types by the cgraph order number. */
1758
1759 enum cgraph_order_sort_kind
1760 {
1761 ORDER_UNDEFINED = 0,
1762 ORDER_FUNCTION,
1763 ORDER_VAR,
1764 ORDER_ASM
1765 };
1766
1767 struct cgraph_order_sort
1768 {
1769 enum cgraph_order_sort_kind kind;
1770 union
1771 {
1772 struct cgraph_node *f;
1773 struct varpool_node *v;
1774 struct asm_node *a;
1775 } u;
1776 };
1777
1778 /* Output all functions, variables, and asm statements in the order
1779 according to their order fields, which is the order in which they
1780 appeared in the file. This implements -fno-toplevel-reorder. In
1781 this mode we may output functions and variables which don't really
1782 need to be output. */
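/* Hypothetical example, for illustration only: with

       int first = 1;
       asm ("# handwritten marker between the two definitions");
       int second = 2;

   -fno-toplevel-reorder keeps the toplevel asm between the two variables
   in the assembler output, exactly as written in the source.  */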
1783
1784 static void
1785 output_in_order (void)
1786 {
1787 int max;
1788 struct cgraph_order_sort *nodes;
1789 int i;
1790 struct cgraph_node *pf;
1791 struct varpool_node *pv;
1792 struct asm_node *pa;
1793
1794 max = symtab_order;
1795 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1796
1797 FOR_EACH_DEFINED_FUNCTION (pf)
1798 {
1799 if (pf->process && !pf->thunk.thunk_p && !pf->symbol.alias)
1800 {
1801 i = pf->symbol.order;
1802 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1803 nodes[i].kind = ORDER_FUNCTION;
1804 nodes[i].u.f = pf;
1805 }
1806 }
1807
1808 FOR_EACH_DEFINED_VARIABLE (pv)
1809 if (!DECL_EXTERNAL (pv->symbol.decl))
1810 {
1811 i = pv->symbol.order;
1812 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1813 nodes[i].kind = ORDER_VAR;
1814 nodes[i].u.v = pv;
1815 }
1816
1817 for (pa = asm_nodes; pa; pa = pa->next)
1818 {
1819 i = pa->order;
1820 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1821 nodes[i].kind = ORDER_ASM;
1822 nodes[i].u.a = pa;
1823 }
1824
1825 /* With -fno-toplevel-reorder we output all statics; finalize their named section flags first. */
1826
1827 for (i = 0; i < max; ++i)
1828 if (nodes[i].kind == ORDER_VAR)
1829 varpool_finalize_named_section_flags (nodes[i].u.v);
1830
1831 for (i = 0; i < max; ++i)
1832 {
1833 switch (nodes[i].kind)
1834 {
1835 case ORDER_FUNCTION:
1836 nodes[i].u.f->process = 0;
1837 expand_function (nodes[i].u.f);
1838 break;
1839
1840 case ORDER_VAR:
1841 varpool_assemble_decl (nodes[i].u.v);
1842 break;
1843
1844 case ORDER_ASM:
1845 assemble_asm (nodes[i].u.a->asm_str);
1846 break;
1847
1848 case ORDER_UNDEFINED:
1849 break;
1850
1851 default:
1852 gcc_unreachable ();
1853 }
1854 }
1855
1856 asm_nodes = NULL;
1857 free (nodes);
1858 }
1859
1860 static void
1861 ipa_passes (void)
1862 {
1863 gcc::pass_manager *passes = g->get_passes ();
1864
1865 set_cfun (NULL);
1866 current_function_decl = NULL;
1867 gimple_register_cfg_hooks ();
1868 bitmap_obstack_initialize (NULL);
1869
1870 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1871
1872 if (!in_lto_p)
1873 {
1874 execute_ipa_pass_list (passes->all_small_ipa_passes);
1875 if (seen_error ())
1876 return;
1877 }
1878
1879 /* Removal of unreachable nodes is never run automatically after the early
1880 passes, because TODOs are run before the subpasses; do it explicitly here.
1881 It is important to remove the unreachable functions to save work at the
1882 IPA level and to get the LTO symbol tables right. */
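/* (E.g., hypothetically, a static helper whose only call site was inlined
   by the early inliner is now unreachable; dropping it here keeps it out
   of the LTO symbol table and off the IPA passes' worklists.)  */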
1883 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
1884
1885 /* If pass_all_early_optimizations was not scheduled, the state of
1886 the cgraph will not be properly updated. Update it now. */
1887 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1888 cgraph_state = CGRAPH_STATE_IPA_SSA;
1889
1890 if (!in_lto_p)
1891 {
1892 /* Generate coverage variables and constructors. */
1893 coverage_finish ();
1894
1895 /* Process new functions added. */
1896 set_cfun (NULL);
1897 current_function_decl = NULL;
1898 cgraph_process_new_functions ();
1899
1900 execute_ipa_summary_passes
1901 ((struct ipa_opt_pass_d *) passes->all_regular_ipa_passes);
1902 }
1903
1904 /* Some targets need to handle LTO assembler output specially. */
1905 if (flag_generate_lto)
1906 targetm.asm_out.lto_start ();
1907
1908 execute_ipa_summary_passes ((struct ipa_opt_pass_d *)
1909 passes->all_lto_gen_passes);
1910
1911 if (!in_lto_p)
1912 ipa_write_summaries ();
1913
1914 if (flag_generate_lto)
1915 targetm.asm_out.lto_end ();
1916
1917 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
1918 execute_ipa_pass_list (passes->all_regular_ipa_passes);
1919 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1920
1921 bitmap_obstack_release (NULL);
1922 }
1923
1924
1925 /* Return the identifier naming the target that DECL's "alias" attribute refers to. */
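/* Hypothetical example: for

       int impl (void) { return 0; }
       int wrapper (void) __attribute__ ((alias ("impl")));

   calling this on wrapper's decl yields the identifier "impl", taken from
   the attribute's string argument.  */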
1926
1927 static tree
1928 get_alias_symbol (tree decl)
1929 {
1930 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1931 return get_identifier (TREE_STRING_POINTER
1932 (TREE_VALUE (TREE_VALUE (alias))));
1933 }
1934
1935
1936 /* Weakrefs may be associated with external decls and thus not output
1937 at expansion time. Emit all necessary aliases. */
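/* Hypothetical example: given

       static int entry (void) __attribute__ ((weakref ("external_impl")));

   nothing in this unit defines external_impl, so entry is never expanded
   and its alias directive has to be emitted by the loop below.  */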
1938
1939 static void
1940 output_weakrefs (void)
1941 {
1942 symtab_node node;
1943 FOR_EACH_SYMBOL (node)
1944 if (node->symbol.alias
1945 && !TREE_ASM_WRITTEN (node->symbol.decl)
1946 && node->symbol.weakref)
1947 {
1948 tree target;
1949
1950 /* Weakrefs are special in that they do not require the target to be
1951 defined in the current compilation unit, so it is a bit hard to work
1952 out what we want to alias.
1953 When the alias target is defined, we fetch it from the symtab reference;
1954 otherwise it is pointed to by alias_target. */
1955 if (node->symbol.alias_target)
1956 target = (DECL_P (node->symbol.alias_target)
1957 ? DECL_ASSEMBLER_NAME (node->symbol.alias_target)
1958 : node->symbol.alias_target);
1959 else if (node->symbol.analyzed)
1960 target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->symbol.decl);
1961 else
1962 {
1963 gcc_unreachable ();
1964 target = get_alias_symbol (node->symbol.decl);
1965 }
1966 do_assemble_alias (node->symbol.decl, target);
1967 }
1968 }
1969
1970 /* Initialize callgraph dump file. */
1971
1972 void
1973 init_cgraph (void)
1974 {
1975 if (!cgraph_dump_file)
1976 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1977 }
1978
1979
1980 /* Perform simple optimizations based on callgraph. */
1981
1982 void
1983 compile (void)
1984 {
1985 if (seen_error ())
1986 return;
1987
1988 #ifdef ENABLE_CHECKING
1989 verify_symtab ();
1990 #endif
1991
1992 timevar_push (TV_CGRAPHOPT);
1993 if (pre_ipa_mem_report)
1994 {
1995 fprintf (stderr, "Memory consumption before IPA\n");
1996 dump_memory_report (false);
1997 }
1998 if (!quiet_flag)
1999 fprintf (stderr, "Performing interprocedural optimizations\n");
2000 cgraph_state = CGRAPH_STATE_IPA;
2001
2002 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2003 if (flag_lto)
2004 lto_streamer_hooks_init ();
2005
2006 /* Don't run the IPA passes if there were any errors or sorry messages. */
2007 if (!seen_error ())
2008 ipa_passes ();
2009
2010 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
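/* (With -flto and without -ffat-lto-objects the object file carries only
   the bytecode streamed out by ipa_passes, so there is nothing left to
   expand here.)  */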
2011 if (seen_error ()
2012 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2013 {
2014 timevar_pop (TV_CGRAPHOPT);
2015 return;
2016 }
2017
2018 /* This pass removes the bodies of extern inline functions we never inlined.
2019 Do it this late so that the other IPA passes see what is really going on. */
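/* For instance (hypothetical, gnu89 semantics):

       extern inline int twice (int x) { return 2 * x; }

   no out-of-line copy of twice is ever emitted from this definition; its
   body was kept only as inlining material and can be discarded now.  */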
2020 symtab_remove_unreachable_nodes (false, dump_file);
2021 cgraph_global_info_ready = true;
2022 if (cgraph_dump_file)
2023 {
2024 fprintf (cgraph_dump_file, "Optimized ");
2025 dump_symtab (cgraph_dump_file);
2026 }
2027 if (post_ipa_mem_report)
2028 {
2029 fprintf (stderr, "Memory consumption after IPA\n");
2030 dump_memory_report (false);
2031 }
2032 timevar_pop (TV_CGRAPHOPT);
2033
2034 /* Output everything. */
2035 (*debug_hooks->assembly_start) ();
2036 if (!quiet_flag)
2037 fprintf (stderr, "Assembling functions:\n");
2038 #ifdef ENABLE_CHECKING
2039 verify_symtab ();
2040 #endif
2041
2042 cgraph_materialize_all_clones ();
2043 bitmap_obstack_initialize (NULL);
2044 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2045 symtab_remove_unreachable_nodes (true, dump_file);
2046 #ifdef ENABLE_CHECKING
2047 verify_symtab ();
2048 #endif
2049 bitmap_obstack_release (NULL);
2050 mark_functions_to_output ();
2051
2052 /* When weakref support is missing, we automatically translate all
2053 references to NODE into references to its ultimate alias target.
2054 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2055 TREE_CHAIN.
2056
2057 Set up this mapping before we output any assembler, but only once we are
2058 sure that all symbol renaming is done.
2059
2060 FIXME: All this ugliness can go away if we just do the renaming at the
2061 GIMPLE level by physically rewriting the IL. At the moment we can only
2062 redirect calls, so we need infrastructure for renaming references as well. */
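/* Sketch of the effect, with hypothetical symbols: if foo is a weakref for
   bar and the assembler lacks a weakref directive, the assembler name
   "foo" is marked transparent and chained to "bar", so later uses of "foo"
   in the output are silently rewritten to "bar".  */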
2063 #ifndef ASM_OUTPUT_WEAKREF
2064 symtab_node node;
2065
2066 FOR_EACH_SYMBOL (node)
2067 if (node->symbol.alias
2068 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
2069 {
2070 IDENTIFIER_TRANSPARENT_ALIAS
2071 (DECL_ASSEMBLER_NAME (node->symbol.decl)) = 1;
2072 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->symbol.decl))
2073 = (node->symbol.alias_target ? node->symbol.alias_target
2074 : DECL_ASSEMBLER_NAME (symtab_alias_target (node)->symbol.decl));
2075 }
2076 #endif
2077
2078 cgraph_state = CGRAPH_STATE_EXPANSION;
2079 if (!flag_toplevel_reorder)
2080 output_in_order ();
2081 else
2082 {
2083 output_asm_statements ();
2084
2085 expand_all_functions ();
2086 varpool_output_variables ();
2087 }
2088
2089 cgraph_process_new_functions ();
2090 cgraph_state = CGRAPH_STATE_FINISHED;
2091 output_weakrefs ();
2092
2093 if (cgraph_dump_file)
2094 {
2095 fprintf (cgraph_dump_file, "\nFinal ");
2096 dump_symtab (cgraph_dump_file);
2097 }
2098 #ifdef ENABLE_CHECKING
2099 verify_symtab ();
2100 /* Double check that all inline clones are gone and that all
2101 function bodies have been released from memory. */
2102 if (!seen_error ())
2103 {
2104 struct cgraph_node *node;
2105 bool error_found = false;
2106
2107 FOR_EACH_DEFINED_FUNCTION (node)
2108 if (node->global.inlined_to
2109 || gimple_has_body_p (node->symbol.decl))
2110 {
2111 error_found = true;
2112 dump_cgraph_node (stderr, node);
2113 }
2114 if (error_found)
2115 internal_error ("nodes with unreleased memory found");
2116 }
2117 #endif
2118 }
2119
2120
2121 /* Analyze the whole compilation unit once it is parsed completely. */
2122
2123 void
2124 finalize_compilation_unit (void)
2125 {
2126 timevar_push (TV_CGRAPH);
2127
2128 /* If we're here, there's no current function anymore. Some frontends
2129 are lazy about clearing these. */
2130 current_function_decl = NULL;
2131 set_cfun (NULL);
2132
2133 /* Do not skip analyzing the functions if there were errors; otherwise
2134 we would miss diagnostics for the functions that follow. */
2135
2136 /* Emit size functions we didn't inline. */
2137 finalize_size_functions ();
2138
2139 /* Mark alias targets necessary and emit diagnostics. */
2140 handle_alias_pairs ();
2141
2142 if (!quiet_flag)
2143 {
2144 fprintf (stderr, "\nAnalyzing compilation unit\n");
2145 fflush (stderr);
2146 }
2147
2148 if (flag_dump_passes)
2149 dump_passes ();
2150
2151 /* Gimplify and lower all functions, compute reachability and
2152 remove unreachable nodes. */
2153 analyze_functions ();
2154
2155 /* Mark alias targets necessary and emit diagnostics. */
2156 handle_alias_pairs ();
2157
2158 /* Gimplify and lower thunks. */
2159 analyze_functions ();
2160
2161 /* Finally drive the pass manager. */
2162 compile ();
2163
2164 timevar_pop (TV_CGRAPH);
2165 }
2166
2167
2168 #include "gt-cgraphunit.h"