]> git.ipfire.org Git - thirdparty/gcc.git/blame_incremental - gcc/cgraphunit.c
* Initial implementation.
[thirdparty/gcc.git] / gcc / cgraphunit.c
... / ...
CommitLineData
1/* Driver of optimization process
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
 31 and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
 78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
 86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
 98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
 110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
 121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueue for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158*/
159
160#include "config.h"
161#include "system.h"
162#include "coretypes.h"
163#include "tm.h"
164#include "hash-set.h"
165#include "machmode.h"
166#include "vec.h"
167#include "double-int.h"
168#include "input.h"
169#include "alias.h"
170#include "symtab.h"
171#include "wide-int.h"
172#include "inchash.h"
173#include "tree.h"
174#include "fold-const.h"
175#include "varasm.h"
176#include "stor-layout.h"
177#include "stringpool.h"
178#include "output.h"
179#include "rtl.h"
180#include "predict.h"
181#include "hard-reg-set.h"
182#include "input.h"
183#include "function.h"
184#include "basic-block.h"
185#include "tree-ssa-alias.h"
186#include "internal-fn.h"
187#include "gimple-fold.h"
188#include "gimple-expr.h"
189#include "is-a.h"
190#include "gimple.h"
191#include "gimplify.h"
192#include "gimple-iterator.h"
193#include "gimplify-me.h"
194#include "gimple-ssa.h"
195#include "tree-cfg.h"
196#include "tree-into-ssa.h"
197#include "tree-ssa.h"
198#include "tree-inline.h"
199#include "langhooks.h"
200#include "toplev.h"
201#include "flags.h"
202#include "debug.h"
203#include "target.h"
204#include "diagnostic.h"
205#include "params.h"
206#include "intl.h"
207#include "hash-map.h"
208#include "plugin-api.h"
209#include "ipa-ref.h"
210#include "cgraph.h"
211#include "alloc-pool.h"
212#include "symbol-summary.h"
213#include "ipa-prop.h"
214#include "tree-iterator.h"
215#include "tree-pass.h"
216#include "tree-dump.h"
217#include "gimple-pretty-print.h"
218#include "output.h"
219#include "coverage.h"
220#include "plugin.h"
221#include "ipa-inline.h"
222#include "ipa-utils.h"
223#include "lto-streamer.h"
224#include "except.h"
225#include "cfgloop.h"
226#include "regset.h" /* FIXME: For reg_obstack. */
227#include "context.h"
228#include "pass_manager.h"
229#include "tree-nested.h"
230#include "gimplify.h"
231#include "dbgcnt.h"
232#include "tree-chkp.h"
233#include "lto-section-names.h"
234#include "omp-low.h"
235#include "print-tree.h"
236
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.
   Drained by symbol_table::process_new_functions.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations of the driver stages defined later in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  GTY keeps it alive across
   garbage collections.  */
static GTY (()) tree vtable_entry_type;
248
249/* Determine if symbol declaration is needed. That is, visible to something
250 either outside this translation unit, something magic in the system
251 configury */
252bool
253symtab_node::needed_p (void)
254{
255 /* Double check that no one output the function into assembly file
256 early. */
257 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
258 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
259
260 if (!definition)
261 return false;
262
263 if (DECL_EXTERNAL (decl))
264 return false;
265
266 /* If the user told us it is used, then it must be so. */
267 if (force_output)
268 return true;
269
270 /* ABI forced symbols are needed when they are external. */
271 if (forced_by_abi && TREE_PUBLIC (decl))
272 return true;
273
274 /* Keep constructors, destructors and virtual functions. */
275 if (TREE_CODE (decl) == FUNCTION_DECL
276 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
277 return true;
278
279 /* Externally visible variables must be output. The exception is
280 COMDAT variables that must be output only when they are needed. */
281 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
282 return true;
283
284 return false;
285}
286
/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  The queue is threaded through symtab_node::aux pointers;
   SYMTAB_TERMINATOR is a dummy sentinel node that marks the end of the
   list (so a non-NULL aux always means "already enqueued").  */

static symtab_node symtab_terminator;
static symtab_node *queued_nodes = &symtab_terminator;
292
293/* Add NODE to queue starting at QUEUED_NODES.
294 The queue is linked via AUX pointers and terminated by pointer to 1. */
295
296static void
297enqueue_node (symtab_node *node)
298{
299 if (node->aux)
300 return;
301 gcc_checking_assert (queued_nodes);
302 node->aux = queued_nodes;
303 queued_nodes = node;
304}
305
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  The
   action taken for each node depends on how far compilation has already
   progressed (symbol_table::state).  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  Hence the index-based loop: the
     vector length is re-read on every iteration.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA form if the rest of the unit already
	     is; otherwise just recompute inline parameters.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summaries != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* All queued nodes are handled; free the vector for the next batch.  */
  cgraph_new_nodes.release ();
}
374
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.
     Whole sub-structures are zeroed; individual flags follow.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  /* Drop outgoing edges and references; they will be rebuilt when the
     new body is analyzed.  */
  remove_callees ();
  remove_all_references ();
}
408
409/* Return true when there are references to the node. */
410
411bool
412symtab_node::referred_to_p (void)
413{
414 ipa_ref *ref = NULL;
415
416 /* See if there are any references at all. */
417 if (iterate_referring (0, ref))
418 return true;
419 /* For functions check also calls. */
420 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
421 if (cn && cn->callers)
422 return true;
423 return false;
424}
425
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  /* A pre-existing definition means DECL is being redefined (the GCC
     extern inline extension); drop the old body first.  */
  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* A body that already has a CFG was lowered by the front end.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!opt_for_fn (decl, optimize)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the callgraph is being built, immediately enqueue symbols that
     are already known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
486
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  LOWERED says
   whether the body has already been lowered to low GIMPLE.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;
  /* How much work must be done here depends on how far compilation
     has progressed.  */
  switch (symtab->state)
    {
    case PARSING:
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      /* During expansion there is no later lowering stage, so lower the
	 body here ourselves.  */
      if (!lowered && symtab->state == EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
572
/* Analyze the function scheduled to be output: expand thunks, resolve
   aliases, generate dispatcher bodies, and gimplify/lower ordinary
   function bodies.  Sets this->analyzed on success.  */
void
cgraph_node::analyze (void)
{
  tree decl = this->decl;
  /* Diagnostics during analysis should point at the function itself.  */
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      create_edge (cgraph_node::get (thunk.alias),
		   NULL, 0, CGRAPH_FREQ_BASE);
      /* If the thunk body cannot be emitted here, bail out without
	 marking the node analyzed.  */
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target));
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* An ordinary function body: gimplify and lower it.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      /* NOTE: misspelled name is the actual API identifier.  */
      assign_assembler_name_if_neeeded (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  /* Nested functions must be materialized before lowering;
	     afterwards none may remain.  */
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
646
647/* C++ frontend produce same body aliases all over the place, even before PCH
648 gets streamed out. It relies on us linking the aliases with their function
649 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
650 first produce aliases without links, but once C++ FE is sure he won't sream
651 PCH we build the links via this function. */
652
653void
654symbol_table::process_same_body_aliases (void)
655{
656 symtab_node *node;
657 FOR_EACH_SYMBOL (node)
658 if (node->cpp_implicit_alias && !node->analyzed)
659 node->resolve_alias
660 (TREE_CODE (node->alias_target) == VAR_DECL
661 ? (symtab_node *)varpool_node::get_create (node->alias_target)
662 : (symtab_node *)cgraph_node::get_create (node->alias_target));
663 cpp_implicit_aliases_done = true;
664}
665
666/* Process attributes common for vars and functions. */
667
668static void
669process_common_attributes (symtab_node *node, tree decl)
670{
671 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
672
673 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
674 {
675 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
676 "%<weakref%> attribute should be accompanied with"
677 " an %<alias%> attribute");
678 DECL_WEAK (decl) = 0;
679 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
680 DECL_ATTRIBUTES (decl));
681 }
682
683 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
684 node->no_reorder = 1;
685}
686
687/* Look for externally_visible and used attributes and mark cgraph nodes
688 accordingly.
689
690 We cannot mark the nodes at the point the attributes are processed (in
691 handle_*_attribute) because the copy of the declarations available at that
692 point may not be canonical. For example, in:
693
694 void f();
695 void f() __attribute__((used));
696
697 the declaration we see in handle_used_attribute will be the second
698 declaration -- but the front end will subsequently merge that declaration
699 with the original declaration and discard the second declaration.
700
701 Furthermore, we can't mark these nodes in finalize_function because:
702
703 void f() {}
704 void f() __attribute__((externally_visible));
705
706 is valid.
707
708 So, we walk the nodes at the end of the translation unit, applying the
709 attributes at that point. */
710
711static void
712process_function_and_variable_attributes (cgraph_node *first,
713 varpool_node *first_var)
714{
715 cgraph_node *node;
716 varpool_node *vnode;
717
718 for (node = symtab->first_function (); node != first;
719 node = symtab->next_function (node))
720 {
721 tree decl = node->decl;
722 if (DECL_PRESERVE_P (decl))
723 node->mark_force_output ();
724 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
725 {
726 if (! TREE_PUBLIC (node->decl))
727 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
728 "%<externally_visible%>"
729 " attribute have effect only on public objects");
730 }
731 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
732 && (node->definition && !node->alias))
733 {
734 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
735 "%<weakref%> attribute ignored"
736 " because function is defined");
737 DECL_WEAK (decl) = 0;
738 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
739 DECL_ATTRIBUTES (decl));
740 }
741
742 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
743 && !DECL_DECLARED_INLINE_P (decl)
744 /* redefining extern inline function makes it DECL_UNINLINABLE. */
745 && !DECL_UNINLINABLE (decl))
746 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
747 "always_inline function might not be inlinable");
748
749 process_common_attributes (node, decl);
750 }
751 for (vnode = symtab->first_variable (); vnode != first_var;
752 vnode = symtab->next_variable (vnode))
753 {
754 tree decl = vnode->decl;
755 if (DECL_EXTERNAL (decl)
756 && DECL_INITIAL (decl))
757 varpool_node::finalize_decl (decl);
758 if (DECL_PRESERVE_P (decl))
759 vnode->force_output = true;
760 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
761 {
762 if (! TREE_PUBLIC (vnode->decl))
763 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
764 "%<externally_visible%>"
765 " attribute have effect only on public objects");
766 }
767 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
768 && vnode->definition
769 && DECL_INITIAL (decl))
770 {
771 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
772 "%<weakref%> attribute ignored"
773 " because variable is initialized");
774 DECL_WEAK (decl) = 0;
775 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
776 DECL_ATTRIBUTES (decl));
777 }
778 process_common_attributes (vnode, decl);
779 }
780}
781
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing is idempotent.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
	   && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* During callgraph construction, enqueue immediately-needed symbols.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Register the initializer with Pointer Bounds Checker (mpx).  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
821
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; only walk each list once.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		   (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		    (method_class_type
		       (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      /* dbg_cnt allows bisecting devirtualizations for debugging.  */
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* Zero targets means the call is unreachable; redirect to
	     __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
909
910
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.  These
   statics remember where the previous run of analyze_functions stopped,
   so repeated calls (intermodule optimization) process only new symbols.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
915
916static void
917analyze_functions (void)
918{
919 /* Keep track of already processed nodes when called multiple times for
920 intermodule optimization. */
921 cgraph_node *first_handled = first_analyzed;
922 varpool_node *first_handled_var = first_analyzed_var;
923 hash_set<void *> reachable_call_targets;
924
925 symtab_node *node;
926 symtab_node *next;
927 int i;
928 ipa_ref *ref;
929 bool changed = true;
930 location_t saved_loc = input_location;
931
932 bitmap_obstack_initialize (NULL);
933 symtab->state = CONSTRUCTION;
934 input_location = UNKNOWN_LOCATION;
935
936 /* Ugly, but the fixup can not happen at a time same body alias is created;
937 C++ FE is confused about the COMDAT groups being right. */
938 if (symtab->cpp_implicit_aliases_done)
939 FOR_EACH_SYMBOL (node)
940 if (node->cpp_implicit_alias)
941 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
942 build_type_inheritance_graph ();
943
944 /* Analysis adds static variables that in turn adds references to new functions.
945 So we need to iterate the process until it stabilize. */
946 while (changed)
947 {
948 changed = false;
949 process_function_and_variable_attributes (first_analyzed,
950 first_analyzed_var);
951
952 /* First identify the trivially needed symbols. */
953 for (node = symtab->first_symbol ();
954 node != first_analyzed
955 && node != first_analyzed_var; node = node->next)
956 {
957 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
958 node->get_comdat_group_id ();
959 if (node->needed_p ())
960 {
961 enqueue_node (node);
962 if (!changed && symtab->dump_file)
963 fprintf (symtab->dump_file, "Trivially needed symbols:");
964 changed = true;
965 if (symtab->dump_file)
966 fprintf (symtab->dump_file, " %s", node->asm_name ());
967 if (!changed && symtab->dump_file)
968 fprintf (symtab->dump_file, "\n");
969 }
970 if (node == first_analyzed
971 || node == first_analyzed_var)
972 break;
973 }
974 symtab->process_new_functions ();
975 first_analyzed_var = symtab->first_variable ();
976 first_analyzed = symtab->first_function ();
977
978 if (changed && symtab->dump_file)
979 fprintf (symtab->dump_file, "\n");
980
981 /* Lower representation, build callgraph edges and references for all trivially
982 needed symbols and all symbols referred by them. */
983 while (queued_nodes != &symtab_terminator)
984 {
985 changed = true;
986 node = queued_nodes;
987 queued_nodes = (symtab_node *)queued_nodes->aux;
988 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
989 if (cnode && cnode->definition)
990 {
991 cgraph_edge *edge;
992 tree decl = cnode->decl;
993
994 /* ??? It is possible to create extern inline function
995 and later using weak alias attribute to kill its body.
996 See gcc.c-torture/compile/20011119-1.c */
997 if (!DECL_STRUCT_FUNCTION (decl)
998 && !cnode->alias
999 && !cnode->thunk.thunk_p
1000 && !cnode->dispatcher_function)
1001 {
1002 cnode->reset ();
1003 cnode->local.redefined_extern_inline = true;
1004 continue;
1005 }
1006
1007 if (!cnode->analyzed)
1008 cnode->analyze ();
1009
1010 for (edge = cnode->callees; edge; edge = edge->next_callee)
1011 if (edge->callee->definition
1012 && (!DECL_EXTERNAL (edge->callee->decl)
1013 /* When not optimizing, do not try to analyze extern
1014 inline functions. Doing so is pointless. */
1015 || opt_for_fn (edge->callee->decl, optimize)
1016 /* Weakrefs needs to be preserved. */
1017 || edge->callee->alias
		     /* always_inline functions are inlined even at -O0. */
1019 || lookup_attribute
1020 ("always_inline",
1021 DECL_ATTRIBUTES (edge->callee->decl))
1022 /* Multiversioned functions needs the dispatcher to
1023 be produced locally even for extern functions. */
1024 || edge->callee->function_version ()))
1025 enqueue_node (edge->callee);
1026 if (opt_for_fn (cnode->decl, optimize)
1027 && opt_for_fn (cnode->decl, flag_devirtualize))
1028 {
1029 cgraph_edge *next;
1030
1031 for (edge = cnode->indirect_calls; edge; edge = next)
1032 {
1033 next = edge->next_callee;
1034 if (edge->indirect_info->polymorphic)
1035 walk_polymorphic_call_targets (&reachable_call_targets,
1036 edge);
1037 }
1038 }
1039
1040 /* If decl is a clone of an abstract function,
1041 mark that abstract function so that we don't release its body.
1042 The DECL_INITIAL() of that abstract function declaration
1043 will be later needed to output debug info. */
1044 if (DECL_ABSTRACT_ORIGIN (decl))
1045 {
1046 cgraph_node *origin_node
1047 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1048 origin_node->used_as_abstract_origin = true;
1049 }
1050 }
1051 else
1052 {
1053 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1054 if (vnode && vnode->definition && !vnode->analyzed)
1055 vnode->analyze ();
1056 }
1057
1058 if (node->same_comdat_group)
1059 {
1060 symtab_node *next;
1061 for (next = node->same_comdat_group;
1062 next != node;
1063 next = next->same_comdat_group)
1064 if (!next->comdat_local_p ())
1065 enqueue_node (next);
1066 }
1067 for (i = 0; node->iterate_reference (i, ref); i++)
1068 if (ref->referred->definition
1069 && (!DECL_EXTERNAL (ref->referred->decl)
1070 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1071 && optimize)
1072 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1073 && opt_for_fn (ref->referred->decl, optimize))
1074 || node->alias
1075 || ref->referred->alias)))
1076 enqueue_node (ref->referred);
1077 symtab->process_new_functions ();
1078 }
1079 }
1080 update_type_inheritance_graph ();
1081
1082 /* Collect entry points to the unit. */
1083 if (symtab->dump_file)
1084 {
1085 fprintf (symtab->dump_file, "\n\nInitial ");
1086 symtab_node::dump_table (symtab->dump_file);
1087 }
1088
1089 if (symtab->dump_file)
1090 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1091
1092 for (node = symtab->first_symbol ();
1093 node != first_handled
1094 && node != first_handled_var; node = next)
1095 {
1096 next = node->next;
1097 if (!node->aux && !node->referred_to_p ())
1098 {
1099 if (symtab->dump_file)
1100 fprintf (symtab->dump_file, " %s", node->name ());
1101 node->remove ();
1102 continue;
1103 }
1104 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1105 {
1106 tree decl = node->decl;
1107
1108 if (cnode->definition && !gimple_has_body_p (decl)
1109 && !cnode->alias
1110 && !cnode->thunk.thunk_p)
1111 cnode->reset ();
1112
1113 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1114 || cnode->alias
1115 || gimple_has_body_p (decl));
1116 gcc_assert (cnode->analyzed == cnode->definition);
1117 }
1118 node->aux = NULL;
1119 }
1120 for (;node; node = node->next)
1121 node->aux = NULL;
1122 first_analyzed = symtab->first_function ();
1123 first_analyzed_var = symtab->first_variable ();
1124 if (symtab->dump_file)
1125 {
1126 fprintf (symtab->dump_file, "\n\nReclaimed ");
1127 symtab_node::dump_table (symtab->dump_file);
1128 }
1129 bitmap_obstack_release (NULL);
1130 ggc_collect ();
1131 /* Initialize assembler name hash, in particular we want to trigger C++
1132 mangling and same body alias creation before we free DECL_ARGUMENTS
1133 used by it. */
1134 if (!seen_error ())
1135 symtab->symtab_initialize_asm_name_hash ();
1136
1137 input_location = saved_loc;
1138}
1139
1140/* Translate the ugly representation of aliases as alias pairs into nice
1141 representation in callgraph. We don't handle all cases yet,
1142 unfortunately. */
1143
static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Note that I is never incremented in the loop header: every path
     below that keeps iterating removes element I via unordered_remove
     (which moves the last element into slot I), so each pair is still
     visited exactly once.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol that does not exist at all:
	     diagnose and drop the alias flag so later passes do not
	     treat the decl as an alias.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  /* Diagnose only; control falls through and the alias is
	     still created below.  */
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* Function aliased to function: drop any body already parsed
	     for the alias decl, then record the alias in the callgraph.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  /* Variable aliased to variable.  */
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable aliases are not supported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1218
1219
1220/* Figure out what functions we want to assemble. */
1221
static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* Nothing may be marked for output yet; PROCESS flags are only set
     in the loop below.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* PROCESS can only have been set already via the comdat-group
	 propagation performed below for an earlier member.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* A COMDAT group is emitted as a unit: mark the other group
	     members too, skipping thunks, aliases and comdat-local
	     symbols which are emitted by other means.  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Remember to re-verify such nodes once all marking is done;
	     see the final loop below.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Verify that comdat-group members that were never marked really
     have no body that should have been reclaimed.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1320
1321/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1322 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1323
1324 Set current_function_decl and cfun to newly constructed empty function body.
1325 return basic block in the function body. */
1326
basic_block
init_lowered_empty_function (tree decl, bool in_ssa)
{
  basic_block bb;

  /* The ordering here matters: the struct function must exist before
     CFG hooks are registered and the empty CFG is built.  */
  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  /* Optionally bring the fresh body straight into SSA form.  */
  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark the body as already-gimplified/lowered so no lowering passes
     are run on it again.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  /* Set up the loop tree; the single body block is attached to the
     root loop below via add_bb_to_loop.  */
  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1363
1364/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1365 offset indicated by VIRTUAL_OFFSET, if that is
1366 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1367 zero for a result adjusting thunk. */
1368
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk it is applied after
     (see the !this_adjusting case at the bottom).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily create the pointer-to-vtable-entry type shared by all
	 thunks.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1467
1468/* Expand thunk NODE to gimple if possible.
1469 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1470 no assembler is produced.
1471 When OUTPUT_ASM_THUNK is true, also produce assembler for
1472 thunks that are not lowered. */
1473
bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = thunk.virtual_value;
  tree virtual_offset = NULL;
  /* The thunk's single callee is the function being thunked.  */
  tree alias = callees->callee->decl;
  tree thunk_fndecl = decl;
  tree a;


  /* Fast path: let the target emit the thunk directly as assembly when
     it can (this-adjusting thunks only), unless the caller insists on
     a GIMPLE body.  */
  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      /* No assembler output requested: just note the node as analyzed
	 and report that no GIMPLE body was produced.  */
      if (!output_asm_thunks)
	{
	  analyzed = true;
	  return false;
	}

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      /* Delegate the actual code emission to the target hook.  */
      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      thunk.thunk_p = false;
      analyzed = false;
    }
  else
    {
      /* Slow path: build an explicit GIMPLE body that adjusts the
	 pointer via thunk_adjust and tail-calls ALIAS.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gcall *call;
      greturn *ret;

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
          DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Until the NULL-pointer guard below splits the CFG, all four
	 block pointers refer to the single body block.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      /* Result returned by invisible reference: store through
		 the result pointer.  */
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (!restmp)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
				 resdecl,
				 build_int_cst (TREE_TYPE
				 (DECL_RESULT (alias)), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate result: reuse the result decl as the call's
		 LHS and make it visible in the function's BLOCK.  */
	      restmp = resdecl;

	      if (TREE_CODE (restmp) == VAR_DECL)
		add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      auto_vec<tree> vargs (nargs);
      /* The first argument is `this'; for a this-adjusting thunk pass
	 it through thunk_adjust, otherwise forward it unchanged.  */
      if (this_adjusting)
        vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
        vargs.quick_push (a);

      /* Forward the remaining arguments, copying non-GIMPLE values
	 into temporaries first.  */
      if (nargs)
        for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      /* Reuse the existing thunk->callee edge for the new call.  */
      callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      gimple_call_set_with_bounds (call, instrumentation_clone);
      if (restmp)
	{
          gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  /* A result-adjusting thunk must fix up the returned value
	     before returning it, so the call cannot be a tail call.  */
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      /* Adjust the non-NULL result in THEN_BB (or inline when
		 no guard was needed).  */
	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  /* The ELSE arm keeps the NULL result as NULL.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* The callee never returns: tail-call it and drop the edge
	     to the exit block.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      thunk.thunk_p = false;
      lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1726
1727/* Assemble thunks and aliases associated to node. */
1728
void
cgraph_node::assemble_thunks_and_aliases (void)
{
  cgraph_edge *e;
  ipa_ref *ref;

  /* Emit every thunk whose target is this function, recursing so that
     thunks-of-thunks are emitted too.  E is advanced before expanding
     because expansion may modify the caller edge list (NOTE: see
     expand_thunk, which rewrites the thunk's call statement).  */
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p
	&& !e->caller->thunk.add_pointer_bounds_args)
      {
	cgraph_node *thunk = e->caller;

	e = e->next_caller;
	thunk->expand_thunk (true, false);
	thunk->assemble_thunks_and_aliases ();
      }
    else
      e = e->next_caller;

  /* Emit all aliases pointing at this function, recursing for aliases
     of aliases.  */
  FOR_EACH_ALIAS (this, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      bool saved_written = TREE_ASM_WRITTEN (decl);

      /* Force assemble_alias to really output the alias this time instead
	 of buffering it in same alias pairs.  */
      TREE_ASM_WRITTEN (decl) = 1;
      do_assemble_alias (alias->decl,
			 DECL_ASSEMBLER_NAME (decl));
      alias->assemble_thunks_and_aliases ();
      TREE_ASM_WRITTEN (decl) = saved_written;
    }
}
1762
1763/* Expand function specified by node. */
1764
void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply any pending IPA transformations before the per-function
     pass pipeline runs.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size if it fits in an unsigned int,
	     otherwise just report it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before the function
     body to get more forward and fewer backward jumps.  This however would
     need solving a problem with comdats.  See PR48668.  Also aliases must
     come after the function itself to make one-pass assemblers, like the
     one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before the function IFF they
     are not in comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
1874
1875/* Node comparer that is responsible for the order that corresponds
1876 to time when a function was launched for the first time. */
1877
1878static int
1879node_cmp (const void *pa, const void *pb)
1880{
1881 const cgraph_node *a = *(const cgraph_node * const *) pa;
1882 const cgraph_node *b = *(const cgraph_node * const *) pb;
1883
1884 /* Functions with time profile must be before these without profile. */
1885 if (!a->tp_first_run || !b->tp_first_run)
1886 return a->tp_first_run - b->tp_first_run;
1887
1888 return a->tp_first_run != b->tp_first_run
1889 ? b->tp_first_run - a->tp_first_run
1890 : b->order - a->order;
1891}
1892
1893/* Expand all functions that must be output.
1894
1895 Attempt to topologically sort the nodes so function is output when
1896 all called functions are already assembled to allow data to be
1897 propagated across the callgraph. Use a stack to get smaller distance
1898 between a function and its callees (later we may choose to use a more
1899 sophisticated algorithm for function reordering; we will likely want
1900 to use subsections to make the output functions appear in top-down
1901 order). */
1902
static void
expand_all_functions (void)
{
  cgraph_node *node;
  cgraph_node **order = XCNEWVEC (cgraph_node *,
				  symtab->cgraph_count);
  /* Counters used only for the dump statistics below.  */
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == symtab->cgraph_count);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* With -fprofile-reorder-functions, re-sort by time-profile first-run
     order (see node_cmp); the loop below walks the array backwards.  */
  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);

  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
	{
	  expanded_func_count++;
	  if(node->tp_first_run)
	    profiled_func_count++;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Time profile order in expand_all_functions:%s:%d\n",
		     node->asm_name (), node->tp_first_run);
	  node->process = 0;
	  node->expand ();
	}
    }

    if (dump_file)
      fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
               main_input_filename, profiled_func_count, expanded_func_count);

  if (symtab->dump_file && flag_profile_reorder_functions)
    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
	     profiled_func_count, expanded_func_count);

  /* Expansion may have created new functions (e.g. thunks); flush them.  */
  symtab->process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
1957
1958/* This is used to sort the node types by the cgraph order number. */
1959
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not used by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph_node (u.f).  */
  ORDER_VAR,		/* Slot holds a varpool_node (u.v).  */
  ORDER_ASM		/* Slot holds an asm_node (u.a).  */
};
1967
/* One entry of the ordered-output table built by output_in_order:
   KIND selects which member of the union U is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;
    varpool_node *v;
    asm_node *a;
  } u;
};
1978
1979/* Output all functions, variables, and asm statements in the order
1980 according to their order fields, which is the order in which they
1981 appeared in the file. This implements -fno-toplevel-reorder. In
1982 this mode we may output functions and variables which don't really
1983 need to be output.
1984 When NO_REORDER is true only do this for symbols marked no reorder. */
1985
static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* NODES is indexed directly by each symbol's ORDER field, so it must
     span the whole order space; most slots stay ORDER_UNDEFINED.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  /* Collect functions marked for output (skipping thunks and aliases,
     which are emitted together with their target).  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Collect defined, non-external variables.  */
  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	  continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  /* Collect toplevel asm statements.  */
  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* Finalize the named-section flags of every variable to be output
     before any of them is assembled.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2065
/* Run the inter-procedural optimization pipeline: small IPA passes,
   unreachable-node removal, summary generation, optional LTO/offload
   bytecode streaming, and (unless only streaming LTO) the regular IPA
   passes.  Steps guarded by !in_lto_p are skipped when reading back
   LTO bytecode, since they already ran at compile time.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes operate on the whole callgraph, not a single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterates.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p)
    {
      /* Stream out summaries: first the offload variant (when target
	 offloading is in use), then the host LTO variant.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  ipa_write_summaries (true);
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  ipa_write_summaries (false);
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes unless this is a slim LTO compile that
     only streams bytecode (they will run at link time instead).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2135
2136
2137/* Return string alias is alias of. */
2138
2139static tree
2140get_alias_symbol (tree decl)
2141{
2142 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2143 return get_identifier (TREE_STRING_POINTER
2144 (TREE_VALUE (TREE_VALUE (alias))));
2145}
2146
2147
2148/* Weakrefs may be associated to external decls and thus not output
2149 at expansion time. Emit all necessary aliases. */
2150
2151void
2152symbol_table::output_weakrefs (void)
2153{
2154 symtab_node *node;
2155 cgraph_node *cnode;
2156 FOR_EACH_SYMBOL (node)
2157 if (node->alias
2158 && !TREE_ASM_WRITTEN (node->decl)
2159 && (!(cnode = dyn_cast <cgraph_node *> (node))
2160 || !cnode->instrumented_version
2161 || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2162 && node->weakref)
2163 {
2164 tree target;
2165
2166 /* Weakrefs are special by not requiring target definition in current
2167 compilation unit. It is thus bit hard to work out what we want to
2168 alias.
2169 When alias target is defined, we need to fetch it from symtab reference,
2170 otherwise it is pointed to by alias_target. */
2171 if (node->alias_target)
2172 target = (DECL_P (node->alias_target)
2173 ? DECL_ASSEMBLER_NAME (node->alias_target)
2174 : node->alias_target);
2175 else if (node->analyzed)
2176 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2177 else
2178 {
2179 gcc_unreachable ();
2180 target = get_alias_symbol (node->decl);
2181 }
2182 do_assemble_alias (node->decl, target);
2183 }
2184}
2185
/* Perform simple optimizations based on callgraph.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* Materialize clones and run late IPA passes before deciding which
     functions reach the assembler output.  */
  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
     }
#endif

  state = EXPANSION;

  /* With -fno-toplevel-reorder emit everything in source order;
     otherwise emit ordered/asm symbols first, then the rest by
     expansion heuristics.  */
  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered.  The process
	 flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2324
2325
2326/* Analyze the whole compilation unit once it is parsed completely. */
2327
2328void
2329symbol_table::finalize_compilation_unit (void)
2330{
2331 timevar_push (TV_CGRAPH);
2332
2333 /* If we're here there's no current function anymore. Some frontends
2334 are lazy in clearing these. */
2335 current_function_decl = NULL;
2336 set_cfun (NULL);
2337
2338 /* Do not skip analyzing the functions if there were errors, we
2339 miss diagnostics for following functions otherwise. */
2340
2341 /* Emit size functions we didn't inline. */
2342 finalize_size_functions ();
2343
2344 /* Mark alias targets necessary and emit diagnostics. */
2345 handle_alias_pairs ();
2346
2347 if (!quiet_flag)
2348 {
2349 fprintf (stderr, "\nAnalyzing compilation unit\n");
2350 fflush (stderr);
2351 }
2352
2353 if (flag_dump_passes)
2354 dump_passes ();
2355
2356 /* Gimplify and lower all functions, compute reachability and
2357 remove unreachable nodes. */
2358 analyze_functions ();
2359
2360 /* Mark alias targets necessary and emit diagnostics. */
2361 handle_alias_pairs ();
2362
2363 /* Gimplify and lower thunks. */
2364 analyze_functions ();
2365
2366 /* Finally drive the pass manager. */
2367 compile ();
2368
2369 timevar_pop (TV_CGRAPH);
2370}
2371
/* Reset all state within cgraphunit.c so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
cgraphunit_c_finalize (void)
{
  /* No queued functions should remain once compilation has finished.  */
  gcc_assert (cgraph_new_nodes.length () == 0);
  cgraph_new_nodes.truncate (0);

  vtable_entry_type = NULL;
  queued_nodes = &symtab_terminator;

  /* Forget the analysis watermarks so a rerun re-analyzes everything.  */
  first_analyzed = NULL;
  first_analyzed_var = NULL;
}
2387
2388/* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
2389 kind of wrapper method. */
2390
2391void
2392cgraph_node::create_wrapper (cgraph_node *target)
2393{
2394 /* Preserve DECL_RESULT so we get right by reference flag. */
2395 tree decl_result = DECL_RESULT (decl);
2396
2397 /* Remove the function's body but keep arguments to be reused
2398 for thunk. */
2399 release_body (true);
2400 reset ();
2401
2402 DECL_RESULT (decl) = decl_result;
2403 DECL_INITIAL (decl) = NULL;
2404 allocate_struct_function (decl, false);
2405 set_cfun (NULL);
2406
2407 /* Turn alias into thunk and expand it into GIMPLE representation. */
2408 definition = true;
2409 thunk.thunk_p = true;
2410 thunk.this_adjusting = false;
2411
2412 cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2413
2414 tree arguments = DECL_ARGUMENTS (decl);
2415
2416 while (arguments)
2417 {
2418 TREE_ADDRESSABLE (arguments) = false;
2419 arguments = TREE_CHAIN (arguments);
2420 }
2421
2422 expand_thunk (false, true);
2423 e->call_stmt_cannot_inline_p = true;
2424
2425 /* Inline summary set-up. */
2426 analyze ();
2427 inline_analyze_function (this);
2428}
2429
2430#include "gt-cgraphunit.h"