]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
Autogenerated fixes of "->symbol." to "->"
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - cgraph_finalize_function
29
30 This function is called once front-end has parsed whole body of function
31 and it is certain that the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66 	 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
 78 	 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
 86 	 time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
 98        d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 	  is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 	 separately.  Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121        IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151       The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "output.h"
166 #include "rtl.h"
167 #include "gimple.h"
168 #include "gimple-ssa.h"
169 #include "tree-cfg.h"
170 #include "tree-into-ssa.h"
171 #include "tree-ssa.h"
172 #include "tree-inline.h"
173 #include "langhooks.h"
174 #include "pointer-set.h"
175 #include "toplev.h"
176 #include "flags.h"
177 #include "ggc.h"
178 #include "debug.h"
179 #include "target.h"
180 #include "diagnostic.h"
181 #include "params.h"
182 #include "fibheap.h"
183 #include "intl.h"
184 #include "function.h"
185 #include "ipa-prop.h"
186 #include "tree-iterator.h"
187 #include "tree-pass.h"
188 #include "tree-dump.h"
189 #include "gimple-pretty-print.h"
190 #include "output.h"
191 #include "coverage.h"
192 #include "plugin.h"
193 #include "ipa-inline.h"
194 #include "ipa-utils.h"
195 #include "lto-streamer.h"
196 #include "except.h"
197 #include "cfgloop.h"
198 #include "regset.h" /* FIXME: For reg_obstack. */
199 #include "context.h"
200 #include "pass_manager.h"
201
202 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
203 secondary queue used during optimization to accommodate passes that
204 may generate new functions that need to be optimized and expanded. */
205 cgraph_node_set cgraph_new_nodes;
206
207 static void expand_all_functions (void);
208 static void mark_functions_to_output (void);
209 static void expand_function (struct cgraph_node *);
210 static void analyze_function (struct cgraph_node *);
211 static void handle_alias_pairs (void);
212
213 FILE *cgraph_dump_file;
214
215 /* Linked list of cgraph asm nodes. */
216 struct asm_node *asm_nodes;
217
218 /* Last node in cgraph_asm_nodes. */
219 static GTY(()) struct asm_node *asm_last_node;
220
221 /* Used for vtable lookup in thunk adjusting. */
222 static GTY (()) tree vtable_entry_type;
223
224 /* Determine if symbol DECL is needed. That is, visible to something
225 either outside this translation unit, something magic in the system
226 configury */
227 bool
228 decide_is_symbol_needed (symtab_node node)
229 {
230 tree decl = node->decl;
231
232 /* Double check that no one output the function into assembly file
233 early. */
234 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
235 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
236
237 if (!node->definition)
238 return false;
239
240 if (DECL_EXTERNAL (decl))
241 return false;
242
243 /* If the user told us it is used, then it must be so. */
244 if (node->force_output)
245 return true;
246
247 /* ABI forced symbols are needed when they are external. */
248 if (node->forced_by_abi && TREE_PUBLIC (decl))
249 return true;
250
251 /* Keep constructors, destructors and virtual functions. */
252 if (TREE_CODE (decl) == FUNCTION_DECL
253 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
254 return true;
255
256 /* Externally visible variables must be output. The exception is
257 COMDAT variables that must be output only when they are needed. */
258 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
259 return true;
260
261 return false;
262 }
263
/* Head of the queue of nodes to be processed while building callgraph.
   The queue is threaded through each node's AUX pointer; the sentinel
   value 1 (rather than NULL) terminates it, so that a NULL AUX still
   means "not enqueued".  */

static symtab_node first = (symtab_node)(void *)1;
267
268 /* Add NODE to queue starting at FIRST.
269 The queue is linked via AUX pointers and terminated by pointer to 1. */
270
271 static void
272 enqueue_node (symtab_node node)
273 {
274 if (node->aux)
275 return;
276 gcc_checking_assert (first);
277 node->aux = first;
278 first = node;
279 }
280
/* Process CGRAPH_NEW_NODES and perform the actions necessary to add these
   functions into the callgraph in a way so they look like ordinary
   reachable functions inserted into the callgraph already at construction
   time.  Returns true if at least one function was finalized for
   output.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;

  if (!cgraph_new_nodes)
    return false;
  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      fndecl = node->decl;
      /* How much catching-up is needed depends on how far compilation
	 had progressed when the function was created.  */
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_finalize_function (fndecl, false);
	  output = true;
          cgraph_call_function_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* In SSA state the function must be brought into SSA form by
	     the early local passes; otherwise only the inline summary
	     (if inlining data exists) needs recomputing.  */
	  if (cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  /* Dominance info computed by the passes above is not kept
	     up to date past this point.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_call_function_insertion_hooks (node);
	  expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  /* The queue has been fully drained; discard it so a fresh one is
     allocated next time a new function appears.  */
  free_cgraph_node_set (cgraph_new_nodes);
  cgraph_new_nodes = NULL;
  return output;
}
352
/* As an GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.

   Resets NODE to the state it would have right after creation, so the
   replacement body can be analyzed from scratch.  */

void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->definition = false;
  node->alias = false;
  node->weakref = false;
  node->cpp_implicit_alias = false;

  /* Outgoing edges and references belong to the old body; drop them so
     the new body builds its own.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
}
386
387 /* Return true when there are references to NODE. */
388
389 static bool
390 referred_to_p (symtab_node node)
391 {
392 struct ipa_ref *ref;
393
394 /* See if there are any references at all. */
395 if (ipa_ref_list_referring_iterate (&node->ref_list, 0, ref))
396 return true;
397 /* For functions check also calls. */
398 cgraph_node *cn = dyn_cast <cgraph_node> (node);
399 if (cn && cn->callers)
400 return true;
401 return false;
402 }
403
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
   have the garbage collector run at the moment.  We would need to either
   create a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool no_collect)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* A toplevel redefinition: throw away the old body and remember
	 that an extern inline was redefined (see cgraph_reset_node).  */
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* A CFG already being present means the front end handed us an
     already-lowered body.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!optimize
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is still being built, put obviously needed
     or already referenced symbols on the analysis worklist.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
}
465
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE (LOWERED says whether lowering already happened).

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  struct cgraph_node *node;
  /* The amount of catching-up required grows with how far compilation
     has already progressed.  */
  switch (cgraph_state)
    {
      case CGRAPH_STATE_PARSING:
	cgraph_finalize_function (fndecl, false);
	break;
      case CGRAPH_STATE_CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_create_node (fndecl);
	if (lowered)
	  node->lowered = true;
	if (!cgraph_new_nodes)
	  cgraph_new_nodes = cgraph_node_set_new ();
	cgraph_node_set_add (cgraph_new_nodes, node);
        break;

      case CGRAPH_STATE_IPA:
      case CGRAPH_STATE_IPA_SSA:
      case CGRAPH_STATE_EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_get_create_node (fndecl);
	node->local.local = false;
	node->definition = true;
	node->force_output = true;
	/* During expansion there is no later opportunity to lower, so do
	   the lowering pipeline right now.  */
	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	  {
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    gimple_register_cfg_hooks ();
	    bitmap_obstack_initialize (NULL);
	    execute_pass_list (passes->all_lowering_passes);
	    passes->execute_early_local_passes ();
	    bitmap_obstack_release (NULL);
	    pop_cfun ();

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	if (!cgraph_new_nodes)
	  cgraph_new_nodes = cgraph_node_set_new ();
	cgraph_node_set_add (cgraph_new_nodes, node);
        break;

      case CGRAPH_STATE_FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_create_node (fndecl);
	if (lowered)
	  node->lowered = true;
	node->definition = true;
	analyze_function (node);
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  g->get_passes ()->execute_early_local_passes ();
	bitmap_obstack_release (NULL);
	pop_cfun ();
	expand_function (node);
	break;

      default:
	gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
554
555 /* Add a top-level asm statement to the list. */
556
557 struct asm_node *
558 add_asm_node (tree asm_str)
559 {
560 struct asm_node *node;
561
562 node = ggc_alloc_cleared_asm_node ();
563 node->asm_str = asm_str;
564 node->order = symtab_order++;
565 node->next = NULL;
566 if (asm_nodes == NULL)
567 asm_nodes = node;
568 else
569 asm_last_node->next = node;
570 asm_last_node = node;
571 return node;
572 }
573
574 /* Output all asm statements we have stored up to be output. */
575
576 static void
577 output_asm_statements (void)
578 {
579 struct asm_node *can;
580
581 if (seen_error ())
582 return;
583
584 for (can = asm_nodes; can; can = can->next)
585 assemble_asm (can->asm_str);
586 asm_nodes = NULL;
587 }
588
/* Analyze the function scheduled to be output: create its call edges or
   alias links and, for an ordinary function, gimplify and lower the body.
   Sets node->analyzed on completion.  */
static void
analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  /* Diagnostics issued during analysis should point at the function.  */
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
      /* expand_thunk returning false means the thunk is represented as
	 a gimple body rather than expanded directly; either way the
	 alias tree is no longer needed.  */
      if (!expand_thunk (node, false))
	{
	  node->thunk.alias = NULL;
	  node->analyzed = true;
	  return;
	}
      node->thunk.alias = NULL;
    }
  if (node->alias)
    symtab_resolve_alias
       (node, cgraph_get_node (node->alias_target));
  else if (node->dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      struct cgraph_function_version_info *dispatcher_version_info
	= get_cgraph_node_version (node);
      if (dispatcher_version_info != NULL
          && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (node);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      /* NB: the triple-e spelling is the actual API name.  */
      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!node->lowered)
	{
	  /* Nested functions must be lowered (and thereby flattened
	     out of their parent) first.  */
	  if (node->nested)
	    lower_nested_functions (node->decl);
	  gcc_assert (!node->nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (g->get_passes ()->all_lowering_passes);
	  /* Dominance info computed during lowering is not maintained
	     beyond this point.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  node->lowered = true;
	}

      pop_cfun ();
    }
  node->analyzed = true;

  input_location = saved_loc;
}
664
/* The C++ frontend produces same-body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++
   FE is sure it won't stream PCH we build the links via this function.  */

void
cgraph_process_same_body_aliases (void)
{
  symtab_node node;
  FOR_EACH_SYMBOL (node)
    /* Resolve every implicit alias not yet linked; the target may be
       either a variable or a function.  */
    if (node->cpp_implicit_alias && !node->analyzed)
      symtab_resolve_alias
        (node,
	 TREE_CODE (node->alias_target) == VAR_DECL
	 ? (symtab_node)varpool_node_for_decl (node->alias_target)
	 : (symtab_node)cgraph_get_create_node (node->alias_target));
  cpp_implicit_aliases_done = true;
}
684
685 /* Process attributes common for vars and functions. */
686
687 static void
688 process_common_attributes (tree decl)
689 {
690 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
691
692 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
693 {
694 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
695 "%<weakref%> attribute should be accompanied with"
696 " an %<alias%> attribute");
697 DECL_WEAK (decl) = 0;
698 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
699 DECL_ATTRIBUTES (decl));
700 }
701 }
702
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  FIRST and FIRST_VAR bound the walk so only
   nodes created since the previous invocation are processed.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Functions created since the last walk.  */
  for (node = cgraph_first_function (); node != first;
       node = cgraph_next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	cgraph_mark_force_output_node (node);
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on a defined (non-alias) function is meaningless;
	 warn and strip it.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  /* Variables created since the last walk.  */
  for (vnode = varpool_first_variable (); vnode != first_var;
       vnode = varpool_next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* External declarations carrying an initializer are really
	 definitions; finalize them now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* As above: "weakref" on an initialized definition is ignored.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}
797
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct
   the middle end to output the variable to asm file, if needed or externally
   visible.  Safe to call more than once; later calls are no-ops.  */

void
varpool_finalize_decl (tree decl)
{
  struct varpool_node *node = varpool_node_for_decl (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Already finalized earlier; nothing to do.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  /* While the symbol table is still being built, enqueue needed or
     referenced variables for analysis.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
  /* Past the IPA SSA stage there is no later analysis walk; analyze
     immediately.  */
  if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
    varpool_analyze_node (node);
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (cgraph_state == CGRAPH_STATE_FINISHED)
    varpool_assemble_decl (node);
}
831
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
			       struct cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	  (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; enqueue its members only the
     first time we see that list.  */
  if (!pointer_set_insert (reachable_call_targets,
			   cache_token))
    {
      if (cgraph_dump_file)
	dump_possible_polymorphic_call_targets 
	  (cgraph_dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (method_class_type
		     (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1)
	{
	  cgraph_node *target;
	  /* An empty target list means the call can never be reached;
	     redirect it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_get_create_node
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  cgraph_make_edge_direct (edge, target);
	  cgraph_redirect_edge_call_stmt_to_callee (edge);
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
907
908
909 /* Discover all functions and variables that are trivially needed, analyze
910 them as well as all functions and variables referred by them */
911
912 static void
913 analyze_functions (void)
914 {
915 /* Keep track of already processed nodes when called multiple times for
916 intermodule optimization. */
917 static struct cgraph_node *first_analyzed;
918 struct cgraph_node *first_handled = first_analyzed;
919 static struct varpool_node *first_analyzed_var;
920 struct varpool_node *first_handled_var = first_analyzed_var;
921 struct pointer_set_t *reachable_call_targets = pointer_set_create ();
922
923 symtab_node node, next;
924 int i;
925 struct ipa_ref *ref;
926 bool changed = true;
927 location_t saved_loc = input_location;
928
929 bitmap_obstack_initialize (NULL);
930 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
931 input_location = UNKNOWN_LOCATION;
932
933 /* Ugly, but the fixup can not happen at a time same body alias is created;
934 C++ FE is confused about the COMDAT groups being right. */
935 if (cpp_implicit_aliases_done)
936 FOR_EACH_SYMBOL (node)
937 if (node->cpp_implicit_alias)
938 fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
939 if (optimize && flag_devirtualize)
940 build_type_inheritance_graph ();
941
942 /* Analysis adds static variables that in turn adds references to new functions.
943 So we need to iterate the process until it stabilize. */
944 while (changed)
945 {
946 changed = false;
947 process_function_and_variable_attributes (first_analyzed,
948 first_analyzed_var);
949
950 /* First identify the trivially needed symbols. */
951 for (node = symtab_nodes;
952 node != first_analyzed
953 && node != first_analyzed_var; node = node->next)
954 {
955 if (decide_is_symbol_needed (node))
956 {
957 enqueue_node (node);
958 if (!changed && cgraph_dump_file)
959 fprintf (cgraph_dump_file, "Trivially needed symbols:");
960 changed = true;
961 if (cgraph_dump_file)
962 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
963 if (!changed && cgraph_dump_file)
964 fprintf (cgraph_dump_file, "\n");
965 }
966 if (node == first_analyzed
967 || node == first_analyzed_var)
968 break;
969 }
970 cgraph_process_new_functions ();
971 first_analyzed_var = varpool_first_variable ();
972 first_analyzed = cgraph_first_function ();
973
974 if (changed && dump_file)
975 fprintf (cgraph_dump_file, "\n");
976
977 /* Lower representation, build callgraph edges and references for all trivially
978 needed symbols and all symbols referred by them. */
979 while (first != (symtab_node)(void *)1)
980 {
981 changed = true;
982 node = first;
983 first = (symtab_node)first->aux;
984 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
985 if (cnode && cnode->definition)
986 {
987 struct cgraph_edge *edge;
988 tree decl = cnode->decl;
989
990 /* ??? It is possible to create extern inline function
991 and later using weak alias attribute to kill its body.
992 See gcc.c-torture/compile/20011119-1.c */
993 if (!DECL_STRUCT_FUNCTION (decl)
994 && !cnode->alias
995 && !cnode->thunk.thunk_p
996 && !cnode->dispatcher_function)
997 {
998 cgraph_reset_node (cnode);
999 cnode->local.redefined_extern_inline = true;
1000 continue;
1001 }
1002
1003 if (!cnode->analyzed)
1004 analyze_function (cnode);
1005
1006 for (edge = cnode->callees; edge; edge = edge->next_callee)
1007 if (edge->callee->definition)
1008 enqueue_node (edge->callee);
1009 if (optimize && flag_devirtualize)
1010 {
1011 struct cgraph_edge *next;
1012
1013 for (edge = cnode->indirect_calls; edge; edge = next)
1014 {
1015 next = edge->next_callee;
1016 if (edge->indirect_info->polymorphic)
1017 walk_polymorphic_call_targets (reachable_call_targets,
1018 edge);
1019 }
1020 }
1021
1022 /* If decl is a clone of an abstract function,
1023 mark that abstract function so that we don't release its body.
1024 The DECL_INITIAL() of that abstract function declaration
1025 will be later needed to output debug info. */
1026 if (DECL_ABSTRACT_ORIGIN (decl))
1027 {
1028 struct cgraph_node *origin_node
1029 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1030 origin_node->used_as_abstract_origin = true;
1031 }
1032 }
1033 else
1034 {
1035 varpool_node *vnode = dyn_cast <varpool_node> (node);
1036 if (vnode && vnode->definition && !vnode->analyzed)
1037 varpool_analyze_node (vnode);
1038 }
1039
1040 if (node->same_comdat_group)
1041 {
1042 symtab_node next;
1043 for (next = node->same_comdat_group;
1044 next != node;
1045 next = next->same_comdat_group)
1046 enqueue_node (next);
1047 }
1048 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
1049 if (ref->referred->definition)
1050 enqueue_node (ref->referred);
1051 cgraph_process_new_functions ();
1052 }
1053 }
1054 if (optimize && flag_devirtualize)
1055 update_type_inheritance_graph ();
1056
1057 /* Collect entry points to the unit. */
1058 if (cgraph_dump_file)
1059 {
1060 fprintf (cgraph_dump_file, "\n\nInitial ");
1061 dump_symtab (cgraph_dump_file);
1062 }
1063
1064 if (cgraph_dump_file)
1065 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
1066
1067 for (node = symtab_nodes;
1068 node != first_handled
1069 && node != first_handled_var; node = next)
1070 {
1071 next = node->next;
1072 if (!node->aux && !referred_to_p (node))
1073 {
1074 if (cgraph_dump_file)
1075 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
1076 symtab_remove_node (node);
1077 continue;
1078 }
1079 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1080 {
1081 tree decl = node->decl;
1082
1083 if (cnode->definition && !gimple_has_body_p (decl)
1084 && !cnode->alias
1085 && !cnode->thunk.thunk_p)
1086 cgraph_reset_node (cnode);
1087
1088 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1089 || cnode->alias
1090 || gimple_has_body_p (decl));
1091 gcc_assert (cnode->analyzed == cnode->definition);
1092 }
1093 node->aux = NULL;
1094 }
1095 for (;node; node = node->next)
1096 node->aux = NULL;
1097 first_analyzed = cgraph_first_function ();
1098 first_analyzed_var = varpool_first_variable ();
1099 if (cgraph_dump_file)
1100 {
1101 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1102 dump_symtab (cgraph_dump_file);
1103 }
1104 bitmap_obstack_release (NULL);
1105 pointer_set_destroy (reachable_call_targets);
1106 ggc_collect ();
1107 /* Initialize assembler name hash, in particular we want to trigger C++
1108 mangling and same body alias creation before we free DECL_ARGUMENTS
1109 used by it. */
1110 if (!seen_error ())
1111 symtab_initialize_asm_name_hash ();
1112
1113 input_location = saved_loc;
1114 }
1115
1116 /* Translate the ugly representation of aliases as alias pairs into nice
1117 representation in callgraph. We don't handle all cases yet,
1118 unfortunately. */
1119
1120 static void
1121 handle_alias_pairs (void)
1122 {
1123 alias_pair *p;
1124 unsigned i;
1125
1126 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1127 {
1128 symtab_node target_node = symtab_node_for_asm (p->target);
1129
1130 /* Weakrefs with target not defined in current unit are easy to handle:
1131 they behave just as external variables except we need to note the
1132 alias flag to later output the weakref pseudo op into asm file. */
1133 if (!target_node
1134 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1135 {
1136 symtab_node node = symtab_get_node (p->decl);
1137 if (node)
1138 {
1139 node->alias_target = p->target;
1140 node->weakref = true;
1141 node->alias = true;
1142 }
1143 alias_pairs->unordered_remove (i);
1144 continue;
1145 }
1146 else if (!target_node)
1147 {
1148 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1149 symtab_node node = symtab_get_node (p->decl);
1150 if (node)
1151 node->alias = false;
1152 alias_pairs->unordered_remove (i);
1153 continue;
1154 }
1155
1156 if (DECL_EXTERNAL (target_node->decl)
1157 /* We use local aliases for C++ thunks to force the tailcall
1158 to bind locally. This is a hack - to keep it working do
1159 the following (which is not strictly correct). */
1160 && (! TREE_CODE (target_node->decl) == FUNCTION_DECL
1161 || ! DECL_VIRTUAL_P (target_node->decl))
1162 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1163 {
1164 error ("%q+D aliased to external symbol %qE",
1165 p->decl, p->target);
1166 }
1167
1168 if (TREE_CODE (p->decl) == FUNCTION_DECL
1169 && target_node && is_a <cgraph_node> (target_node))
1170 {
1171 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1172 if (src_node && src_node->definition)
1173 cgraph_reset_node (src_node);
1174 cgraph_create_function_alias (p->decl, target_node->decl);
1175 alias_pairs->unordered_remove (i);
1176 }
1177 else if (TREE_CODE (p->decl) == VAR_DECL
1178 && target_node && is_a <varpool_node> (target_node))
1179 {
1180 varpool_create_variable_alias (p->decl, target_node->decl);
1181 alias_pairs->unordered_remove (i);
1182 }
1183 else
1184 {
1185 error ("%q+D alias in between function and variable is not supported",
1186 p->decl);
1187 warning (0, "%q+D aliased declaration",
1188 target_node->decl);
1189 alias_pairs->unordered_remove (i);
1190 }
1191 }
1192 vec_free (alias_pairs);
1193 }
1194
1195
1196 /* Figure out what functions we want to assemble. */
1197
static void
mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for output before we start.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* Only the comdat-group propagation below may have marked NODE
	 already.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* If one member of a comdat group is emitted, emit every
	     non-thunk, non-alias member of the group as well.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->same_comdat_group);
		   next != node;
		   next = cgraph (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Defer the reclaim check for comdat members; it is verified
	     for the whole group at the end.  */
#ifdef ENABLE_CHECKING
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Any comdat member left unmarked must have no reclaimable body.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1295
1296 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1297 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1298
1299 Set current_function_decl and cfun to newly constructed empty function body.
1300 return basic block in the function body. */
1301
1302 basic_block
1303 init_lowered_empty_function (tree decl, bool in_ssa)
1304 {
1305 basic_block bb;
1306
1307 current_function_decl = decl;
1308 allocate_struct_function (decl, false);
1309 gimple_register_cfg_hooks ();
1310 init_empty_tree_cfg ();
1311
1312 if (in_ssa)
1313 {
1314 init_tree_ssa (cfun);
1315 init_ssa_operands (cfun);
1316 cfun->gimple_df->in_ssa_p = true;
1317 cfun->curr_properties |= PROP_ssa;
1318 }
1319
1320 DECL_INITIAL (decl) = make_node (BLOCK);
1321
1322 DECL_SAVED_TREE (decl) = error_mark_node;
1323 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1324 | PROP_cfg | PROP_loops);
1325
1326 set_loops_for_fn (cfun, ggc_alloc_cleared_loops ());
1327 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1328 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1329
1330 /* Create BB for body of the function and connect it properly. */
1331 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1332 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1333 make_edge (bb, EXIT_BLOCK_PTR, 0);
1334 add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);
1335
1336 return bb;
1337 }
1338
1339 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1340 offset indicated by VIRTUAL_OFFSET, if that is
1341 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1342 zero for a result adjusting thunk. */
1343
1344 static tree
1345 thunk_adjust (gimple_stmt_iterator * bsi,
1346 tree ptr, bool this_adjusting,
1347 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1348 {
1349 gimple stmt;
1350 tree ret;
1351
1352 if (this_adjusting
1353 && fixed_offset != 0)
1354 {
1355 stmt = gimple_build_assign
1356 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1357 ptr,
1358 fixed_offset));
1359 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1360 }
1361
1362 /* If there's a virtual offset, look up that value in the vtable and
1363 adjust the pointer again. */
1364 if (virtual_offset)
1365 {
1366 tree vtabletmp;
1367 tree vtabletmp2;
1368 tree vtabletmp3;
1369
1370 if (!vtable_entry_type)
1371 {
1372 tree vfunc_type = make_node (FUNCTION_TYPE);
1373 TREE_TYPE (vfunc_type) = integer_type_node;
1374 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1375 layout_type (vfunc_type);
1376
1377 vtable_entry_type = build_pointer_type (vfunc_type);
1378 }
1379
1380 vtabletmp =
1381 create_tmp_reg (build_pointer_type
1382 (build_pointer_type (vtable_entry_type)), "vptr");
1383
1384 /* The vptr is always at offset zero in the object. */
1385 stmt = gimple_build_assign (vtabletmp,
1386 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1387 ptr));
1388 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1389
1390 /* Form the vtable address. */
1391 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1392 "vtableaddr");
1393 stmt = gimple_build_assign (vtabletmp2,
1394 build_simple_mem_ref (vtabletmp));
1395 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1396
1397 /* Find the entry with the vcall offset. */
1398 stmt = gimple_build_assign (vtabletmp2,
1399 fold_build_pointer_plus_loc (input_location,
1400 vtabletmp2,
1401 virtual_offset));
1402 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1403
1404 /* Get the offset itself. */
1405 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1406 "vcalloffset");
1407 stmt = gimple_build_assign (vtabletmp3,
1408 build_simple_mem_ref (vtabletmp2));
1409 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1410
1411 /* Adjust the `this' pointer. */
1412 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1413 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1414 GSI_CONTINUE_LINKING);
1415 }
1416
1417 if (!this_adjusting
1418 && fixed_offset != 0)
1419 /* Adjust the pointer by the constant. */
1420 {
1421 tree ptrtmp;
1422
1423 if (TREE_CODE (ptr) == VAR_DECL)
1424 ptrtmp = ptr;
1425 else
1426 {
1427 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1428 stmt = gimple_build_assign (ptrtmp, ptr);
1429 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1430 }
1431 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1432 ptrtmp, fixed_offset);
1433 }
1434
1435 /* Emit the statement and gimplify the adjustment expression. */
1436 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1437 stmt = gimple_build_assign (ret, ptr);
1438 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1439
1440 return ret;
1441 }
1442
/* Expand thunk NODE to gimple if possible.
   When OUTPUT_ASM_THUNKS is true, also produce assembler for
   thunks that are not lowered.  */
1446
bool
expand_thunk (struct cgraph_node *node, bool output_asm_thunks)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  /* The thunk has exactly one callee: the function it forwards to.  */
  tree alias = node->callees->callee->decl;
  tree thunk_fndecl = node->decl;
  tree a;


  /* Fast path: let the target emit the this-adjusting thunk directly as
     assembly when it can; this produces no GIMPLE at all.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      /* Caller asked only for lowering; asm-only thunks stay unlowered.  */
      if (!output_asm_thunks)
	return false;

      if (in_lto_p)
	cgraph_get_body (node);
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The thunk is now an ordinary, already-emitted function.  */
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Generic path: lower the thunk to a GIMPLE body that adjusts the
	 pointer(s) and tail-calls ALIAS.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      vec<tree> vargs;

      gimple call;
      gimple ret;

      if (in_lto_p)
	cgraph_get_body (node);
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
          DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Start with a single block; then/else/return blocks are only split
	 off below for pointer-returning covariant thunks.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    restmp = gimple_fold_indirect_ref (resdecl);
	  else if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      vargs.create (nargs);
      /* First argument is `this'; adjust it if this is a this-adjusting
	 thunk, otherwise forward it unchanged.  */
      if (this_adjusting)
	vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
	vargs.quick_push (a);

      /* Remaining arguments are forwarded verbatim.  */
      if (nargs)
	for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  vargs.quick_push (arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      node->callees->call_stmt = call;
      vargs.release ();
      gimple_call_set_from_thunk (call, true);
      if (restmp)
	{
          gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  /* A result-adjusting (covariant return) thunk must fix up the
	     returned pointer, so the call cannot be a tail call.  */
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR, 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  /* NULL result: skip the adjustment and return zero.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  ret = gimple_build_return (restmp);
	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* Noreturn callee: make it a tail call and drop the fallthru
	     edge to EXIT.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      node->lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1672
1673 /* Assemble thunks and aliases associated to NODE. */
1674
1675 static void
1676 assemble_thunks_and_aliases (struct cgraph_node *node)
1677 {
1678 struct cgraph_edge *e;
1679 int i;
1680 struct ipa_ref *ref;
1681
1682 for (e = node->callers; e;)
1683 if (e->caller->thunk.thunk_p)
1684 {
1685 struct cgraph_node *thunk = e->caller;
1686
1687 e = e->next_caller;
1688 assemble_thunks_and_aliases (thunk);
1689 expand_thunk (thunk, true);
1690 }
1691 else
1692 e = e->next_caller;
1693 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
1694 i, ref); i++)
1695 if (ref->use == IPA_REF_ALIAS)
1696 {
1697 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1698 bool saved_written = TREE_ASM_WRITTEN (node->decl);
1699
1700 /* Force assemble_alias to really output the alias this time instead
1701 of buffering it in same alias pairs. */
1702 TREE_ASM_WRITTEN (node->decl) = 1;
1703 do_assemble_alias (alias->decl,
1704 DECL_ASSEMBLER_NAME (node->decl));
1705 assemble_thunks_and_aliases (alias);
1706 TREE_ASM_WRITTEN (node->decl) = saved_written;
1707 }
1708 }
1709
1710 /* Expand function specified by NODE. */
1711
static void
expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  gcc_assert (node->lowered);
  /* Fetch the (possibly LTO-streamed) GIMPLE body before expansion.  */
  cgraph_get_body (node);

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply the per-function parts of the IPA passes first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Re-compare to detect whether the size overflowed the
	     unsigned int used for printing.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  /* The GIMPLE body is no longer needed once RTL has been emitted.  */
  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_get_node (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body
     to get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
}
1822
1823
1824 /* Expand all functions that must be output.
1825
1826 Attempt to topologically sort the nodes so function is output when
1827 all called functions are already assembled to allow data to be
1828 propagated across the callgraph. Use a stack to get smaller distance
1829 between a function and its callees (later we may choose to use a more
1830 sophisticated algorithm for function reordering; we will likely want
1831 to use subsections to make the output functions appear in top-down
1832 order). */
1833
1834 static void
1835 expand_all_functions (void)
1836 {
1837 struct cgraph_node *node;
1838 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1839 int order_pos, new_order_pos = 0;
1840 int i;
1841
1842 order_pos = ipa_reverse_postorder (order);
1843 gcc_assert (order_pos == cgraph_n_nodes);
1844
1845 /* Garbage collector may remove inline clones we eliminate during
1846 optimization. So we must be sure to not reference them. */
1847 for (i = 0; i < order_pos; i++)
1848 if (order[i]->process)
1849 order[new_order_pos++] = order[i];
1850
1851 for (i = new_order_pos - 1; i >= 0; i--)
1852 {
1853 node = order[i];
1854 if (node->process)
1855 {
1856 node->process = 0;
1857 expand_function (node);
1858 }
1859 }
1860 cgraph_process_new_functions ();
1861
1862 free (order);
1863
1864 }
1865
1866 /* This is used to sort the node types by the cgraph order number. */
1867
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot holds no symbol.  */
  ORDER_FUNCTION,	/* Slot holds a function (union member f).  */
  ORDER_VAR,		/* Slot holds a variable (union member v).  */
  ORDER_ASM		/* Slot holds a toplevel asm (union member a).  */
};
1875
/* One slot of the output-in-order table; KIND selects which union
   member is valid.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct asm_node *a;		/* Valid when kind == ORDER_ASM.  */
  } u;
};
1886
1887 /* Output all functions, variables, and asm statements in the order
1888 according to their order fields, which is the order in which they
1889 appeared in the file. This implements -fno-toplevel-reorder. In
1890 this mode we may output functions and variables which don't really
1891 need to be output. */
1892
1893 static void
1894 output_in_order (void)
1895 {
1896 int max;
1897 struct cgraph_order_sort *nodes;
1898 int i;
1899 struct cgraph_node *pf;
1900 struct varpool_node *pv;
1901 struct asm_node *pa;
1902
1903 max = symtab_order;
1904 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1905
1906 FOR_EACH_DEFINED_FUNCTION (pf)
1907 {
1908 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1909 {
1910 i = pf->order;
1911 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1912 nodes[i].kind = ORDER_FUNCTION;
1913 nodes[i].u.f = pf;
1914 }
1915 }
1916
1917 FOR_EACH_DEFINED_VARIABLE (pv)
1918 if (!DECL_EXTERNAL (pv->decl))
1919 {
1920 i = pv->order;
1921 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1922 nodes[i].kind = ORDER_VAR;
1923 nodes[i].u.v = pv;
1924 }
1925
1926 for (pa = asm_nodes; pa; pa = pa->next)
1927 {
1928 i = pa->order;
1929 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1930 nodes[i].kind = ORDER_ASM;
1931 nodes[i].u.a = pa;
1932 }
1933
1934 /* In toplevel reorder mode we output all statics; mark them as needed. */
1935
1936 for (i = 0; i < max; ++i)
1937 if (nodes[i].kind == ORDER_VAR)
1938 varpool_finalize_named_section_flags (nodes[i].u.v);
1939
1940 for (i = 0; i < max; ++i)
1941 {
1942 switch (nodes[i].kind)
1943 {
1944 case ORDER_FUNCTION:
1945 nodes[i].u.f->process = 0;
1946 expand_function (nodes[i].u.f);
1947 break;
1948
1949 case ORDER_VAR:
1950 varpool_assemble_decl (nodes[i].u.v);
1951 break;
1952
1953 case ORDER_ASM:
1954 assemble_asm (nodes[i].u.a->asm_str);
1955 break;
1956
1957 case ORDER_UNDEFINED:
1958 break;
1959
1960 default:
1961 gcc_unreachable ();
1962 }
1963 }
1964
1965 asm_nodes = NULL;
1966 free (nodes);
1967 }
1968
/* Run the inter-procedural (IPA) pass pipeline: early small-IPA passes,
   summary generation, LTO stream-out when requested, and the regular
   IPA passes.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes operate on the whole program, not a single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Early small-IPA passes only run when compiling from source, not in
     an LTO link stage.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save works at IPA level and to get LTO
     symbol tables right.  */
  symtab_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *)
			      passes->all_lto_gen_passes);

  /* Write LTO summaries when producing (fat) LTO objects.  */
  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Regular IPA passes run here unless this is a slim -flto compile,
     where they are deferred to the link-time (WPA/ltrans) stages.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2032
2033
2034 /* Return string alias is alias of. */
2035
2036 static tree
2037 get_alias_symbol (tree decl)
2038 {
2039 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2040 return get_identifier (TREE_STRING_POINTER
2041 (TREE_VALUE (TREE_VALUE (alias))));
2042 }
2043
2044
2045 /* Weakrefs may be associated to external decls and thus not output
2046 at expansion time. Emit all necessary aliases. */
2047
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  Walks every symbol
   in the symbol table and emits an assembler alias for each weakref
   alias whose DECL has not already been written out.  */

static void
output_weakrefs (void)
{
  symtab_node node;
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
	else
	  {
	    /* NOTE(review): this branch is believed impossible -- a weakref
	       alias should always have either an alias_target or an analyzed
	       symtab target.  The call below is dead code after
	       gcc_unreachable (); kept as a historical fallback for
	       non-checking builds where gcc_unreachable may not abort.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2078
2079 /* Initialize callgraph dump file. */
2080
2081 void
2082 init_cgraph (void)
2083 {
2084 if (!cgraph_dump_file)
2085 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2086 }
2087
2088
2089 /* Perform simple optimizations based on callgraph. */
2090
/* Perform simple optimizations based on callgraph.  This is the main
   middle-end driver: it runs the IPA passes, prunes the symbol table,
   expands all functions to assembly, and emits variables, toplevel
   asms and weakref aliases.  Operates on global state only; the
   statement ordering below is significant throughout.  */

void
compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  symtab_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  cgraph_materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  /* Late IPA passes may have made more symbols unreachable; prune again
     before deciding what to assemble.  */
  symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl));
      }
#endif

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder requires emitting symbols in declaration
     order; otherwise use the cheaper grouped output.  */
  if (!flag_toplevel_reorder)
    output_in_order ();
  else
    {
      output_asm_statements ();

      expand_all_functions ();
      varpool_output_variables ();
    }

  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;
  output_weakrefs ();

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_symtab ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2228
2229
2230 /* Analyze the whole compilation unit once it is parsed completely. */
2231
/* Analyze the whole compilation unit once it is parsed completely.
   Front ends call this exactly once, after every declaration has been
   finalized; it lowers all functions to GIMPLE, resolves aliases, and
   then hands control to compile () to drive the pass manager.  The
   call sequence below is order-sensitive.  */

void
finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Analysis may have discovered new alias pairs (e.g. from attributes
     seen only after lowering); resolve those too.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2275
2276
2277 #include "gt-cgraphunit.h"