/* Driver of optimization process
   Copyright (C) 2003-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This module implements the main driver of the compilation process.

   The main scope of this file is to act as an interface between the
   tree based frontends and the backend.

   The front-end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front-end has parsed the whole body
      of a function and it is certain that neither the function body nor
      the declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_decl

      This function has the same behavior as the above but is used for
      static variables.

    - add_asm_node

      Inserts a new toplevel ASM statement.

    - finalize_compilation_unit

      This function is called once the (source level) compilation unit
      is finalized and will no longer change.

      The symbol table is constructed starting from the trivially needed
      symbols finalized by the frontend.  Functions are lowered into
      GIMPLE representation and callgraph/reference lists are constructed.
      Those are used to discover other necessary functions and variables.

      At the end the bodies of unreachable functions are removed.

      The function can be called multiple times when multiple source level
      compilation units are combined.

    - compile

      This passes control to the back-end.  Optimizations are performed and
      final assembler is generated.  This is done in the following way.  Note
      that with link time optimization the process is split into three
      stages (compile time, linktime analysis and parallel linktime as
      indicated below).

      Compile time:

	1) Inter-procedural optimization.
	   (ipa_passes)

	   This part is further split into:

	   a) early optimizations.  These are local passes executed in
	      the topological order on the callgraph.

	      The purpose of early optimizations is to optimize away simple
	      things that may otherwise confuse IP analysis.  Very simple
	      propagation across the callgraph is done, i.e. to discover
	      functions without side effects, and simple inlining is performed.

	   b) early small interprocedural passes.

	      Those are interprocedural passes executed only at compilation
	      time.  These include, for example, transactional memory lowering,
	      unreachable code removal and other simple transformations.

	   c) IP analysis stage.  All interprocedural passes do their
	      analysis.

	      Interprocedural passes differ from small interprocedural
	      passes by their ability to operate across the whole program
	      at linktime.  Their analysis stage is performed early both to
	      reduce linking times and linktime memory usage by not having
	      to represent the whole program in memory.

	   d) LTO streaming.  When doing LTO, everything important gets
	      streamed into the object file.

      Compile time and/or linktime analysis stage (WPA):

	      At linktime units get streamed back and the symbol table is
	      merged.  Function bodies are not streamed in and are not
	      available.

	   e) IP propagation stage.  All IP passes execute their
	      IP propagation.  This is done based on the earlier analysis
	      without having function bodies at hand.

	   f) Ltrans streaming.  When doing WHOPR LTO, the program
	      is partitioned and streamed into multiple object files.

      Compile time and/or parallel linktime stage (ltrans):

	      Each of the object files is streamed back and compiled
	      separately.  Now the function bodies become available
	      again.

	 2) Virtual clone materialization
	    (cgraph_materialize_clone)

	    IP passes can produce copies of existing functions (such
	    as versioned clones or inline clones) without actually
	    manipulating their bodies by creating virtual clones in
	    the callgraph.  At this time the virtual clones are
	    turned into real functions.

	 3) IP transformation

	    All IP passes transform function bodies based on the earlier
	    decisions of the IP propagation.

	 4) late small IP passes

	    Simple IP passes working within a single program partition.

	 5) Expansion
	    (expand_all_functions)

	    At this stage functions that need to be output into the
	    assembler are identified and compiled in topological order.

	 6) Output of variables and aliases
	    Now it is known which variable references were not optimized
	    out and thus all variables are output to the file.

	    Note that with -fno-toplevel-reorder passes 5 and 6
	    are combined together in cgraph_output_in_order.

   Finally there are functions to manipulate the callgraph from the
   backend.

    - cgraph_add_new_function is used to add backend produced
      functions introduced after the unit is finalized.
      The functions are enqueued for later processing and inserted
      into the callgraph with cgraph_process_new_functions.

    - cgraph_function_versioning

      produces a copy of the function into a new one (a version)
      and applies simple transformations.  */
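
/* As an illustrative sketch (not code in this file), a frontend drives
   the interface above roughly like this; the parser loop and the DECL
   variable are hypothetical:

     tree decl;
     while ((decl = parse_next_toplevel_decl ()) != NULL_TREE)
       {
	 if (TREE_CODE (decl) == FUNCTION_DECL)
	   cgraph_finalize_function (decl, false);
	 else if (TREE_CODE (decl) == VAR_DECL)
	   varpool_node::finalize_decl (decl);
       }
     finalize_compilation_unit ();  */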

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "output.h"
#include "rtl.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "toplev.h"
#include "flags.h"
#include "debug.h"
#include "target.h"
#include "diagnostic.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "coverage.h"
#include "plugin.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"
#include "except.h"
#include "cfgloop.h"
#include "regset.h"   /* FIXME: For reg_obstack.  */
#include "context.h"
#include "pass_manager.h"
#include "tree-nested.h"
#include "dbgcnt.h"

/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
cgraph_node_set cgraph_new_nodes;

static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void expand_function (struct cgraph_node *);
static void handle_alias_pairs (void);

FILE *cgraph_dump_file;

/* Linked list of cgraph asm nodes.  */
struct asm_node *asm_nodes;

/* Last node in cgraph_asm_nodes.  */
static GTY(()) struct asm_node *asm_last_node;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Determine if symbol DECL is needed.  That is, visible to something
   either outside this translation unit or something magic in the system
   configury.  */
bool
decide_is_symbol_needed (symtab_node *node)
{
  tree decl = node->decl;

  /* Double check that no one has output the function into an assembly
     file early.  */
  gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
		       || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));

  if (!node->definition)
    return false;

  if (DECL_EXTERNAL (decl))
    return false;

  /* If the user told us it is used, then it must be so.  */
  if (node->force_output)
    return true;

  /* ABI forced symbols are needed when they are external.  */
  if (node->forced_by_abi && TREE_PUBLIC (decl))
    return true;

  /* Keep constructors, destructors and virtual functions.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
    return true;

  /* Externally visible variables must be output.  The exception is COMDAT
     variables, which must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  return false;
}

/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  */

static symtab_node symtab_terminator;
static symtab_node *queued_nodes = &symtab_terminator;

/* Add NODE to queue starting at QUEUED_NODES.
   The queue is linked via AUX pointers and terminated by a pointer
   to SYMTAB_TERMINATOR.  */

static void
enqueue_node (symtab_node *node)
{
  if (node->aux)
    return;
  gcc_checking_assert (queued_nodes);
  node->aux = queued_nodes;
  queued_nodes = node;
}

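/* Note that enqueue_node pushes at the head, so the queue behaves as a
   stack, and a node's non-NULL AUX field doubles as the "already queued"
   marker.  analyze_functions below drains it by chasing the AUX pointers
   until the terminator is reached, roughly:

     while (queued_nodes != &symtab_terminator)
       {
	 node = queued_nodes;
	 queued_nodes = (symtab_node *) queued_nodes->aux;
	 ... process node ...
       }  */
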
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into the callgraph in a way so they look like ordinary reachable
   functions inserted into the callgraph already at construction time.  */

void
cgraph_process_new_functions (void)
{
  tree fndecl;
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;

  if (!cgraph_new_nodes)
    return;
  handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      fndecl = node->decl;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize the function and
	     move it into the reachable functions list.  */

	  cgraph_finalize_function (fndecl, false);
	  node->call_function_insertion_hooks ();
	  enqueue_node (node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  if (cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  node->call_function_insertion_hooks ();
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  node->call_function_insertion_hooks ();
	  expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  free_cgraph_node_set (cgraph_new_nodes);
  cgraph_new_nodes = NULL;
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with the new body so that in unit-at-a-time
   mode we always use the new body, while in normal mode we may end up
   with the old body inlined into some functions and the new body
   expanded and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  remove_callees ();
  remove_all_references ();
}

/* Return true when there are references to NODE.  */

static bool
referred_to_p (symtab_node *node)
{
  struct ipa_ref *ref = NULL;

  /* See if there are any references at all.  */
  if (node->iterate_referring (0, ref))
    return true;
  /* For functions, also check calls.  */
  cgraph_node *cn = dyn_cast <cgraph_node *> (node);
  if (cn && cn->callers)
    return true;
  return false;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
   have the garbage collector run at the moment.  We would need to either
   create a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool no_collect)
{
  struct cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output static functions (see PR24561), but
     don't do so for always_inline functions, functions declared inline and
     nested functions.  These were optimized out in the original
     implementation and it is unclear whether we want to change the behavior
     here.  */
  if ((!optimize
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
}

/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by the middle end and allows insertion of new functions at an arbitrary
   point of compilation.  The function can be either in high, low or SSA
   form GIMPLE.

   The function is assumed to be reachable and have its address taken (so
   no API breaking optimizations are performed on it).

   The main work done by this function is to enqueue the function for later
   processing to avoid the need for the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  struct cgraph_node *node;
  switch (cgraph_state)
    {
    case CGRAPH_STATE_PARSING:
      cgraph_finalize_function (fndecl, false);
      break;
    case CGRAPH_STATE_CONSTRUCTION:
      /* Just enqueue the function to be processed at the nearest
	 occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_IPA:
    case CGRAPH_STATE_IPA_SSA:
    case CGRAPH_STATE_EXPANSION:
      /* Bring the function into finalized state and enqueue it for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      expand_function (node);
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}

/* Add a top-level asm statement to the list.  */

struct asm_node *
add_asm_node (tree asm_str)
{
  struct asm_node *node;

  node = ggc_cleared_alloc<asm_node> ();
  node->asm_str = asm_str;
  node->order = symtab_order++;
  node->next = NULL;
  if (asm_nodes == NULL)
    asm_nodes = node;
  else
    asm_last_node->next = node;
  asm_last_node = node;
  return node;
}

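/* For illustration only: a frontend seeing a toplevel statement such as
   asm ("nop"); typically wraps the string in a STRING_CST and registers
   it with something like

     add_asm_node (build_string (strlen ("nop"), "nop"));

   the order field set above keeps the statement sequenced with the rest
   of the symbol table for -fno-toplevel-reorder output.  */
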
/* Output all asm statements we have stored up to be output.  */

static void
output_asm_statements (void)
{
  struct asm_node *can;

  if (seen_error ())
    return;

  for (can = asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
void
cgraph_node::analyze (void)
{
  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      create_edge (cgraph_node::get (thunk.alias),
		   NULL, 0, CGRAPH_FREQ_BASE);
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  analyzed = true;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target));
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      struct cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}

/* The C++ frontend produces same-body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE
   is sure it won't stream PCH we build the links via this function.  */

void
cgraph_process_same_body_aliases (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    if (node->cpp_implicit_alias && !node->analyzed)
      node->resolve_alias
	(TREE_CODE (node->alias_target) == VAR_DECL
	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
  cpp_implicit_aliases_done = true;
}

/* Process attributes common for vars and functions.  */

static void
process_common_attributes (tree decl)
{
  tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));

  if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
    {
      warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		  "%<weakref%> attribute should be accompanied with"
		  " an %<alias%> attribute");
      DECL_WEAK (decl) = 0;
      DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						 DECL_ATTRIBUTES (decl));
    }
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  varpool_node *first_var)
{
  struct cgraph_node *node;
  varpool_node *vnode;

  for (node = cgraph_first_function (); node != first;
       node = cgraph_next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute has effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* Redefining an extern inline function makes it
	     DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  for (vnode = varpool_first_variable (); vnode != first_var;
       vnode = varpool_next_variable (vnode))
    {
      tree decl = vnode->decl;
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute has effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}

/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file, if
   needed or externally visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
  if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     has finished.  */
  if (cgraph_state == CGRAPH_STATE_FINISHED
      || (!flag_toplevel_reorder && cgraph_state == CGRAPH_STATE_EXPANSION))
    node->assemble_decl ();
}

/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       struct cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *> targets
    = possible_polymorphic_call_targets (edge, &final, &cache_token);

  if (!reachable_call_targets->add (cache_token))
    {
      if (cgraph_dump_file)
	dump_possible_polymorphic_call_targets
	  (cgraph_dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find a use of the virtual table defining it,
	     or it is unused.  */
	  if (targets[i]->definition
	      && TREE_CODE (TREE_TYPE (targets[i]->decl)) == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		    (method_class_type (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivations)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  cgraph_make_edge_direct (edge, target);
	  cgraph_redirect_edge_call_stmt_to_callee (edge);
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}

/* Discover all functions and variables that are trivially needed, and
   analyze them as well as all functions and variables referred to by
   them.  */

static void
analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_handled = first_analyzed;
  static varpool_node *first_analyzed_var;
  varpool_node *first_handled_var = first_analyzed_var;
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  struct ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup cannot happen at the time the same-body alias is
     created; the C++ FE is confused about the COMDAT groups being right.  */
  if (cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn add references to new
     functions.  So we need to iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  */
      for (node = symtab_nodes;
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (decide_is_symbol_needed (node))
	    {
	      enqueue_node (node);
	      if (!changed && cgraph_dump_file)
		fprintf (cgraph_dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (cgraph_dump_file)
		fprintf (cgraph_dump_file, " %s", node->asm_name ());
	      if (!changed && cgraph_dump_file)
		fprintf (cgraph_dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      cgraph_process_new_functions ();
      first_analyzed_var = varpool_first_variable ();
      first_analyzed = cgraph_first_function ();

      if (changed && cgraph_dump_file)
	fprintf (cgraph_dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all
	 trivially needed symbols and all symbols referred to by them.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      struct cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create an extern inline function and
		 later use the weak alias attribute to kill its body.
		 See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  enqueue_node (edge->callee);
	      if (optimize && flag_devirtualize)
		{
		  struct cgraph_edge *next;

		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
		 mark that abstract function so that we don't release its
		 body.  The DECL_INITIAL() of that abstract function
		 declaration will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  struct cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		enqueue_node (next);
	    }
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition)
	      enqueue_node (ref->referred);
	  cgraph_process_new_functions ();
	}
    }
  if (optimize && flag_devirtualize)
    update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nInitial ");
      symtab_node::dump_table (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nRemoving unused symbols:");

  for (node = symtab_nodes;
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !referred_to_p (node))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", node->name ());
	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl));
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      node->aux = NULL;
    }
  for (; node; node = node->next)
    node->aux = NULL;
  first_analyzed = cgraph_first_function ();
  first_analyzed_var = varpool_first_variable ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      symtab_node::dump_table (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize the assembler name hash, in particular we want to trigger
     C++ mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}

/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node_for_asm (p->target);

      /* Weakrefs with a target not defined in the current unit are easy to
	 handle: they behave just as external variables except we need to
	 note the alias flag to later output the weakref pseudo-op into the
	 asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  struct cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  error ("%q+D alias between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}

/* Figure out what functions we want to assemble.  */

static void
mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));
	}
    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}

/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA form.

   Set current_function_decl and cfun to a newly constructed empty function
   body.  Return the basic block in the function body.  */

basic_block
init_lowered_empty_function (tree decl, bool in_ssa)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create a BB for the body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}

/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	       (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						      ptr,
						      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp
	= create_tmp_reg (build_pointer_type
			    (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}

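/* In other words, for a this adjusting thunk the gimple emitted above
   computes, in C-like pseudocode (a sketch only, not code in this file):

     this += fixed_offset;
     if (virtual_offset)
       this += *(*(ptrdiff_t **) this + virtual_offset);

   while a result adjusting thunk applies the vtable lookup first and
   adds the fixed offset last.  */
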
/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNKS is true, also produce assembler for
   thunks that are not lowered.  */

bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = callees->callee->decl;
  tree thunk_fndecl = decl;
  tree a;

  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      if (!output_asm_thunks)
	return false;

      if (in_lto_p)
	get_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      thunk.thunk_p = false;
      analyzed = false;
    }
  else
    {
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gimple call;
      gimple ret;

      if (in_lto_p)
	get_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	  DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb
	= init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build the call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    restmp = gimple_fold_indirect_ref (resdecl);
	  else if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      auto_vec<tree> vargs (nargs);
      if (this_adjusting)
	vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
	vargs.quick_push (a);

      if (nargs)
	for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
					(TREE_TYPE (arg)), "arg");
		gimple stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      if (restmp)
	{
	  gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  ret = gimple_build_return (restmp);
	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      thunk.thunk_p = false;
      lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}

/* Assemble thunks and aliases associated with NODE.  */

static void
assemble_thunks_and_aliases (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct ipa_ref *ref;

  for (e = node->callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	struct cgraph_node *thunk = e->caller;

	e = e->next_caller;
	thunk->expand_thunk (true, false);
	assemble_thunks_and_aliases (thunk);
      }
    else
      e = e->next_caller;

  FOR_EACH_ALIAS (node, ref)
    {
      struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      bool saved_written = TREE_ASM_WRITTEN (node->decl);

      /* Force assemble_alias to really output the alias this time instead
	 of buffering it in the alias pairs list.  */
      TREE_ASM_WRITTEN (node->decl) = 1;
      do_assemble_alias (alias->decl,
			 DECL_ASSEMBLER_NAME (node->decl));
      assemble_thunks_and_aliases (alias);
      TREE_ASM_WRITTEN (node->decl) = saved_written;
    }
}

1739/* Expand function specified by NODE. */
1740
1741static void
1742expand_function (struct cgraph_node *node)
1743{
1744 tree decl = node->decl;
1745 location_t saved_loc;
1746
1747 /* We ought to not compile any inline clones. */
1748 gcc_assert (!node->global.inlined_to);
1749
1750 announce_function (decl);
1751 node->process = 0;
1752 gcc_assert (node->lowered);
1753 node->get_body ();
1754
1755 /* Generate RTL for the body of DECL. */
1756
1757 timevar_push (TV_REST_OF_COMPILATION);
1758
1759 gcc_assert (cgraph_global_info_ready);
1760
1761 /* Initialize the default bitmap obstack. */
1762 bitmap_obstack_initialize (NULL);
1763
1764 /* Initialize the RTL code for the function. */
1765 current_function_decl = decl;
1766 saved_loc = input_location;
1767 input_location = DECL_SOURCE_LOCATION (decl);
1768 init_function_start (decl);
1769
1770 gimple_register_cfg_hooks ();
1771
1772 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1773
1774 execute_all_ipa_transforms ();
1775
1776 /* Perform all tree transforms and optimizations. */
1777
1778 /* Signal the start of passes. */
1779 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1780
1781 execute_pass_list (cfun, g->get_passes ()->all_passes);
1782
1783 /* Signal the end of passes. */
1784 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1785
1786 bitmap_obstack_release (&reg_obstack);
1787
1788 /* Release the default bitmap obstack. */
1789 bitmap_obstack_release (NULL);
1790
1791 /* If requested, warn about function definitions where the function will
1792 return a value (usually of some struct or union type) which itself will
1793 take up a lot of stack space. */
1794 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1795 {
1796 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1797
1798 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1799 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1800 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1801 larger_than_size))
1802 {
1803 unsigned int size_as_int
1804 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1805
1806 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1807 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1808 decl, size_as_int);
1809 else
1810 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1811 decl, larger_than_size);
1812 }
1813 }
1814
1815 gimple_set_body (decl, NULL);
1816 if (DECL_STRUCT_FUNCTION (decl) == 0
1817 && !cgraph_node::get (decl)->origin)
1818 {
1819 /* Stop pointing to the local nodes about to be freed.
1820 But DECL_INITIAL must remain nonzero so we know this
1821 was an actual function definition.
1822 For a nested function, this is done in c_pop_function_context.
1823 If rest_of_compilation set this to 0, leave it 0. */
1824 if (DECL_INITIAL (decl) != 0)
1825 DECL_INITIAL (decl) = error_mark_node;
1826 }
1827
1828 input_location = saved_loc;
1829
1830 ggc_collect ();
1831 timevar_pop (TV_REST_OF_COMPILATION);
1832
  /* Make sure that the back end didn't give up on compiling.  */
1834 gcc_assert (TREE_ASM_WRITTEN (decl));
1835 set_cfun (NULL);
1836 current_function_decl = NULL;
1837
  /* It would make a lot more sense to output thunks before the function body
     to get more forward and fewer backward jumps.  That, however, would
     require solving a problem with comdats.  See PR48668.  Also aliases must
     come after the function itself to keep one-pass assemblers, like the one
     on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before the function IFF they are
     not in comdat groups.  */
1844 assemble_thunks_and_aliases (node);
1845 node->release_body ();
1846 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1847 points to the dead function body. */
1848 node->remove_callees ();
1849 node->remove_all_references ();
1850}
1851
/* Node comparator responsible for producing the order in which functions
   were first executed (their tp_first_run time profile).  */
1854
1855static int
1856node_cmp (const void *pa, const void *pb)
1857{
1858 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
1859 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
1860
  /* Functions with a time profile must come before those without one.
     (Note that expand_all_functions walks the sorted array backwards,
     so qsort order is the reverse of the final expansion order.)  */
  if (!a->tp_first_run || !b->tp_first_run)
    return a->tp_first_run - b->tp_first_run;
1864
1865 return a->tp_first_run != b->tp_first_run
1866 ? b->tp_first_run - a->tp_first_run
1867 : b->order - a->order;
1868}
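
/* A worked example (illustrative): for nodes A, B and C with tp_first_run
   values 0, 3 and 1, qsort with node_cmp produces the array [A, B, C]:
   the unprofiled A sorts first and the profiled nodes follow in descending
   first-run order.  Since expand_all_functions walks the sorted array
   backwards, the actual expansion order is C (first run 1), then B (first
   run 3), with the unprofiled A last.  */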
1869
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so that a function is output
   after all of its callees have already been assembled, allowing data to
   be propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want to
   use subsections to make the output functions appear in top-down
   order).  */
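
/* E.g. (illustrative): for a callgraph main -> helper -> leaf, the
   reverse postorder is [main, helper, leaf]; the loop below walks the
   array backwards and thus expands leaf, then helper, then main, so
   every function is assembled before its callers (absent cycles).  */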
1879
1880static void
1881expand_all_functions (void)
1882{
1883 struct cgraph_node *node;
1884 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1885 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1886 int order_pos, new_order_pos = 0;
1887 int i;
1888
1889 order_pos = ipa_reverse_postorder (order);
1890 gcc_assert (order_pos == cgraph_n_nodes);
1891
  /* The garbage collector may remove inline clones we eliminate during
     optimization, so we must be sure not to reference them.  */
1894 for (i = 0; i < order_pos; i++)
1895 if (order[i]->process)
1896 order[new_order_pos++] = order[i];
1897
1898 if (flag_profile_reorder_functions)
1899 qsort (order, new_order_pos, sizeof (struct cgraph_node *), node_cmp);
1900
1901 for (i = new_order_pos - 1; i >= 0; i--)
1902 {
1903 node = order[i];
1904
1905 if (node->process)
1906 {
1907 expanded_func_count++;
	  if (node->tp_first_run)
	    profiled_func_count++;

	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file,
		     "Time profile order in expand_all_functions:%s:%d\n",
		     node->asm_name (), node->tp_first_run);
1913
1914 node->process = 0;
1915 expand_function (node);
1916 }
1917 }
1918
1919 if (dump_file)
1920 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1921 main_input_filename, profiled_func_count, expanded_func_count);
1922
1923 if (cgraph_dump_file && flag_profile_reorder_functions)
1924 fprintf (cgraph_dump_file, "Expanded functions with time profile:%u/%u\n",
1925 profiled_func_count, expanded_func_count);
1926
1927 cgraph_process_new_functions ();
1928 free_gimplify_stack ();
1929
1930 free (order);
1931}
1932
/* This is used to sort nodes by their symbol-table order number.  */
1934
1935enum cgraph_order_sort_kind
1936{
1937 ORDER_UNDEFINED = 0,
1938 ORDER_FUNCTION,
1939 ORDER_VAR,
1940 ORDER_ASM
1941};
1942
1943struct cgraph_order_sort
1944{
1945 enum cgraph_order_sort_kind kind;
1946 union
1947 {
1948 struct cgraph_node *f;
1949 varpool_node *v;
1950 struct asm_node *a;
1951 } u;
1952};
1953
/* Output all functions, variables, and asm statements in the order
   given by their order fields, which is the order in which they
   appeared in the source file.  This implements -fno-toplevel-reorder.
   In this mode we may output functions and variables which don't really
   need to be output.  */
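
/* For example (hypothetical input), under -fno-toplevel-reorder the
   toplevel constructs of

     int v;
     asm ("# marker");
     void f (void) {}

   are emitted in exactly this textual order, because v, the asm node
   and f received increasing order numbers as they were parsed.  */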
1959
1960static void
1961output_in_order (void)
1962{
1963 int max;
1964 struct cgraph_order_sort *nodes;
1965 int i;
1966 struct cgraph_node *pf;
1967 varpool_node *pv;
1968 struct asm_node *pa;
1969
1970 max = symtab_order;
1971 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1972
1973 FOR_EACH_DEFINED_FUNCTION (pf)
1974 {
1975 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1976 {
1977 i = pf->order;
1978 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1979 nodes[i].kind = ORDER_FUNCTION;
1980 nodes[i].u.f = pf;
1981 }
1982 }
1983
1984 FOR_EACH_DEFINED_VARIABLE (pv)
1985 if (!DECL_EXTERNAL (pv->decl))
1986 {
1987 i = pv->order;
1988 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1989 nodes[i].kind = ORDER_VAR;
1990 nodes[i].u.v = pv;
1991 }
1992
1993 for (pa = asm_nodes; pa; pa = pa->next)
1994 {
1995 i = pa->order;
1996 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1997 nodes[i].kind = ORDER_ASM;
1998 nodes[i].u.a = pa;
1999 }
2000
  /* In -fno-toplevel-reorder mode we output all statics; mark them as
     needed.  */
2002
2003 for (i = 0; i < max; ++i)
2004 if (nodes[i].kind == ORDER_VAR)
2005 nodes[i].u.v->finalize_named_section_flags ();
2006
2007 for (i = 0; i < max; ++i)
2008 {
2009 switch (nodes[i].kind)
2010 {
2011 case ORDER_FUNCTION:
2012 nodes[i].u.f->process = 0;
2013 expand_function (nodes[i].u.f);
2014 break;
2015
2016 case ORDER_VAR:
2017 nodes[i].u.v->assemble_decl ();
2018 break;
2019
2020 case ORDER_ASM:
2021 assemble_asm (nodes[i].u.a->asm_str);
2022 break;
2023
2024 case ORDER_UNDEFINED:
2025 break;
2026
2027 default:
2028 gcc_unreachable ();
2029 }
2030 }
2031
2032 asm_nodes = NULL;
2033 free (nodes);
2034}
2035
2036static void
2037ipa_passes (void)
2038{
2039 gcc::pass_manager *passes = g->get_passes ();
2040
2041 set_cfun (NULL);
2042 current_function_decl = NULL;
2043 gimple_register_cfg_hooks ();
2044 bitmap_obstack_initialize (NULL);
2045
2046 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2047
2048 if (!in_lto_p)
2049 {
2050 execute_ipa_pass_list (passes->all_small_ipa_passes);
2051 if (seen_error ())
2052 return;
2053 }
2054
  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save work at the IPA level and to get the
     LTO symbol tables right.  */
2059 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
2060
2061 /* If pass_all_early_optimizations was not scheduled, the state of
2062 the cgraph will not be properly updated. Update it now. */
2063 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2064 cgraph_state = CGRAPH_STATE_IPA_SSA;
2065
2066 if (!in_lto_p)
2067 {
2068 /* Generate coverage variables and constructors. */
2069 coverage_finish ();
2070
2071 /* Process new functions added. */
2072 set_cfun (NULL);
2073 current_function_decl = NULL;
2074 cgraph_process_new_functions ();
2075
2076 execute_ipa_summary_passes
2077 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2078 }
2079
2080 /* Some targets need to handle LTO assembler output specially. */
2081 if (flag_generate_lto)
2082 targetm.asm_out.lto_start ();
2083
2084 if (!in_lto_p)
2085 ipa_write_summaries ();
2086
2087 if (flag_generate_lto)
2088 targetm.asm_out.lto_end ();
2089
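  /* An informal summary of the combinations above (assuming the usual
     meaning of the flags): without -flto the regular IPA passes simply run
     here; with plain -flto only summaries are streamed and the passes are
     skipped at compile time; with -ffat-lto-objects they run in addition
     to the streaming; in an LTRANS unit (flag_ltrans) they are skipped
     because the analysis already happened at the link-time analysis
     stage.  */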
2090 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2091 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2092 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2093
2094 bitmap_obstack_release (NULL);
2095}
2096
2097
/* Return, as an identifier, the string that DECL's "alias" attribute
   points to.  */
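
/* For example (hypothetical declarations):

     void target (void) {}
     void f (void) __attribute__ ((alias ("target")));

   lookup_attribute finds the "alias" attribute of f, and the function
   below returns the identifier "target".  */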
2099
2100static tree
2101get_alias_symbol (tree decl)
2102{
2103 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2104 return get_identifier (TREE_STRING_POINTER
2105 (TREE_VALUE (TREE_VALUE (alias))));
2106}
2107
2108
/* Weakrefs may be associated with external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */
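
/* A typical source-level trigger (illustrative):

     static void f (void) __attribute__ ((weakref ("bar")));

   f is then just a local name for the possibly-undefined symbol bar;
   no definition of bar has to exist in this translation unit.  */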
2111
2112static void
2113output_weakrefs (void)
2114{
2115 symtab_node *node;
2116 FOR_EACH_SYMBOL (node)
2117 if (node->alias
2118 && !TREE_ASM_WRITTEN (node->decl)
2119 && node->weakref)
2120 {
2121 tree target;
2122
      /* Weakrefs are special in that they do not require the target to be
	 defined in the current compilation unit, so it is a bit hard to
	 work out what we want to alias.
	 When the alias target is defined, we fetch it from the symtab
	 reference; otherwise it is pointed to by alias_target.  */
2128 if (node->alias_target)
2129 target = (DECL_P (node->alias_target)
2130 ? DECL_ASSEMBLER_NAME (node->alias_target)
2131 : node->alias_target);
2132 else if (node->analyzed)
2133 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2134 else
2135 {
2136 gcc_unreachable ();
2137 target = get_alias_symbol (node->decl);
2138 }
2139 do_assemble_alias (node->decl, target);
2140 }
2141}
2142
2143/* Initialize callgraph dump file. */
2144
2145void
2146init_cgraph (void)
2147{
2148 if (!cgraph_dump_file)
2149 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2150}
2151
2152
2153/* Perform simple optimizations based on callgraph. */
2154
2155void
2156compile (void)
2157{
2158 if (seen_error ())
2159 return;
2160
2161#ifdef ENABLE_CHECKING
2162 symtab_node::verify_symtab_nodes ();
2163#endif
2164
2165 timevar_push (TV_CGRAPHOPT);
2166 if (pre_ipa_mem_report)
2167 {
2168 fprintf (stderr, "Memory consumption before IPA\n");
2169 dump_memory_report (false);
2170 }
2171 if (!quiet_flag)
2172 fprintf (stderr, "Performing interprocedural optimizations\n");
2173 cgraph_state = CGRAPH_STATE_IPA;
2174
2175 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2176 if (flag_lto)
2177 lto_streamer_hooks_init ();
2178
2179 /* Don't run the IPA passes if there was any error or sorry messages. */
2180 if (!seen_error ())
2181 ipa_passes ();
2182
2183 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2184 if (seen_error ()
2185 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2186 {
2187 timevar_pop (TV_CGRAPHOPT);
2188 return;
2189 }
2190
  /* This pass removes the bodies of extern inline functions we never
     inlined.  Do this late so that other IPA passes first see what is
     really going on.  */
2193 symtab_remove_unreachable_nodes (false, dump_file);
2194 cgraph_global_info_ready = true;
2195 if (cgraph_dump_file)
2196 {
2197 fprintf (cgraph_dump_file, "Optimized ");
      symtab_node::dump_table (cgraph_dump_file);
2199 }
2200 if (post_ipa_mem_report)
2201 {
2202 fprintf (stderr, "Memory consumption after IPA\n");
2203 dump_memory_report (false);
2204 }
2205 timevar_pop (TV_CGRAPHOPT);
2206
2207 /* Output everything. */
2208 (*debug_hooks->assembly_start) ();
2209 if (!quiet_flag)
2210 fprintf (stderr, "Assembling functions:\n");
2211#ifdef ENABLE_CHECKING
2212 symtab_node::verify_symtab_nodes ();
2213#endif
2214
2215 cgraph_materialize_all_clones ();
2216 bitmap_obstack_initialize (NULL);
2217 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2218 symtab_remove_unreachable_nodes (true, dump_file);
2219#ifdef ENABLE_CHECKING
2220 symtab_node::verify_symtab_nodes ();
2221#endif
2222 bitmap_obstack_release (NULL);
2223 mark_functions_to_output ();
2224
  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS
     and TREE_CHAIN.

     Set up this mapping before we output any assembler, but once we are
     sure that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at the
     gimple level by physically rewriting the IL.  At the moment we can
     only redirect calls, so we need infrastructure for renaming
     references as well.  */
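  /* E.g. (illustrative): on such a target a weakref f of symbol bar is not
     emitted as an alias at all; instead each use of f's assembler name is
     transparently rewritten to bar through the TREE_CHAIN link set up
     below.  */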
2236#ifndef ASM_OUTPUT_WEAKREF
2237 symtab_node *node;
2238
2239 FOR_EACH_SYMBOL (node)
2240 if (node->alias
2241 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2242 {
2243 IDENTIFIER_TRANSPARENT_ALIAS
2244 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2245 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2246 = (node->alias_target ? node->alias_target
2247 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2248 }
2249#endif
2250
2251 cgraph_state = CGRAPH_STATE_EXPANSION;
2252
2253 if (!flag_toplevel_reorder)
2254 output_in_order ();
2255 else
2256 {
2257 output_asm_statements ();
2258
2259 expand_all_functions ();
2260 varpool_node::output_variables ();
2261 }
2262
2263 cgraph_process_new_functions ();
2264 cgraph_state = CGRAPH_STATE_FINISHED;
2265 output_weakrefs ();
2266
2267 if (cgraph_dump_file)
2268 {
2269 fprintf (cgraph_dump_file, "\nFinal ");
2270 symtab_node::dump_table (cgraph_dump_file);
2271 }
2272#ifdef ENABLE_CHECKING
2273 symtab_node::verify_symtab_nodes ();
2274 /* Double check that all inline clones are gone and that all
2275 function bodies have been released from memory. */
2276 if (!seen_error ())
2277 {
2278 struct cgraph_node *node;
2279 bool error_found = false;
2280
2281 FOR_EACH_DEFINED_FUNCTION (node)
2282 if (node->global.inlined_to
2283 || gimple_has_body_p (node->decl))
2284 {
2285 error_found = true;
2286 node->debug ();
2287 }
2288 if (error_found)
2289 internal_error ("nodes with unreleased memory found");
2290 }
2291#endif
2292}
2293
2294
2295/* Analyze the whole compilation unit once it is parsed completely. */
2296
2297void
2298finalize_compilation_unit (void)
2299{
2300 timevar_push (TV_CGRAPH);
2301
2302 /* If we're here there's no current function anymore. Some frontends
2303 are lazy in clearing these. */
2304 current_function_decl = NULL;
2305 set_cfun (NULL);
2306
  /* Do not skip analyzing the functions if there were errors; otherwise
     we would miss diagnostics for the following functions.  */
2309
2310 /* Emit size functions we didn't inline. */
2311 finalize_size_functions ();
2312
2313 /* Mark alias targets necessary and emit diagnostics. */
2314 handle_alias_pairs ();
2315
2316 if (!quiet_flag)
2317 {
2318 fprintf (stderr, "\nAnalyzing compilation unit\n");
2319 fflush (stderr);
2320 }
2321
2322 if (flag_dump_passes)
2323 dump_passes ();
2324
2325 /* Gimplify and lower all functions, compute reachability and
2326 remove unreachable nodes. */
2327 analyze_functions ();
2328
2329 /* Mark alias targets necessary and emit diagnostics. */
2330 handle_alias_pairs ();
2331
2332 /* Gimplify and lower thunks. */
2333 analyze_functions ();
2334
2335 /* Finally drive the pass manager. */
2336 compile ();
2337
2338 timevar_pop (TV_CGRAPH);
2339}
2340
/* Turn this node into a wrapper that merely forwards to the TARGET node.
   A thunk is used to implement this kind of wrapper method.  */
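
/* For instance (an illustrative sketch, not actual input), if a pass
   proves that

     int f (int x) { return x + 1; }
     int g (int y) { return y + 1; }

   are semantically identical, it may call create_wrapper on g's node with
   f's node as TARGET, turning g into a forwarding call to f; the
   identical-code-folding pass (ipa-icf) is one such user.  */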
2343
2344void
2345cgraph_node::create_wrapper (struct cgraph_node *target)
2346{
  /* Preserve DECL_RESULT so we get the right by-reference flag.  */
2348 tree decl_result = DECL_RESULT (decl);
2349
2350 /* Remove the function's body. */
2351 release_body ();
2352 reset ();
2353
2354 DECL_RESULT (decl) = decl_result;
2355 DECL_INITIAL (decl) = NULL;
2356 allocate_struct_function (decl, false);
2357 set_cfun (NULL);
2358
2359 /* Turn alias into thunk and expand it into GIMPLE representation. */
2360 definition = true;
2361 thunk.thunk_p = true;
2362 thunk.this_adjusting = false;
2363
2364 struct cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2365
2366 if (!expand_thunk (false, true))
2367 analyzed = true;
2368
2369 e->call_stmt_cannot_inline_p = true;
2370
2371 /* Inline summary set-up. */
2372 analyze ();
2373 inline_analyze_function (this);
2374}
2375
2376#include "gt-cgraphunit.h"