]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
Remove cgraph_local_info structure.
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31          and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66        indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78          The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86         time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110        is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121       IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141       Now it is known what variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151       The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations for the driver routines defined later in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  GC-rooted via GTY.  */
static GTY (()) tree vtable_entry_type;
220
221 /* Return true if this symbol is a function from the C frontend specified
222 directly in RTL form (with "__RTL"). */
223
224 bool
225 symtab_node::native_rtl_p () const
226 {
227 if (TREE_CODE (decl) != FUNCTION_DECL)
228 return false;
229 if (!DECL_STRUCT_FUNCTION (decl))
230 return false;
231 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
232 }
233
234 /* Determine if symbol declaration is needed. That is, visible to something
235 either outside this translation unit, something magic in the system
236 configury */
237 bool
238 symtab_node::needed_p (void)
239 {
240 /* Double check that no one output the function into assembly file
241 early. */
242 if (!native_rtl_p ())
243 gcc_checking_assert
244 (!DECL_ASSEMBLER_NAME_SET_P (decl)
245 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
246
247 if (!definition)
248 return false;
249
250 if (DECL_EXTERNAL (decl))
251 return false;
252
253 /* If the user told us it is used, then it must be so. */
254 if (force_output)
255 return true;
256
257 /* ABI forced symbols are needed when they are external. */
258 if (forced_by_abi && TREE_PUBLIC (decl))
259 return true;
260
261 /* Keep constructors, destructors and virtual functions. */
262 if (TREE_CODE (decl) == FUNCTION_DECL
263 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
264 return true;
265
266 /* Externally visible variables must be output. The exception is
267 COMDAT variables that must be output only when they are needed. */
268 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
269 return true;
270
271 return false;
272 }
273
274 /* Head and terminator of the queue of nodes to be processed while building
275 callgraph. */
276
277 static symtab_node symtab_terminator;
278 static symtab_node *queued_nodes = &symtab_terminator;
279
280 /* Add NODE to queue starting at QUEUED_NODES.
281 The queue is linked via AUX pointers and terminated by pointer to 1. */
282
283 static void
284 enqueue_node (symtab_node *node)
285 {
286 if (node->aux)
287 return;
288 gcc_checking_assert (queued_nodes);
289 node->aux = queued_nodes;
290 queued_nodes = node;
291 }
292
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      /* How much catching-up a late function needs depends on how far
	 compilation had progressed when it was created.  */
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    {
	      bool summaried_computed = ipa_fn_summaries != NULL;
	      g->get_passes ()->execute_early_local_passes ();
	      /* Early passes compute inline parameters to do inlining
		 and splitting.  This is redundant for functions added late.
		 Just throw away whatever it did.  */
	      if (!summaried_computed)
		{
		  ipa_free_fn_summary ();
		  ipa_free_size_summary ();
		}
	    }
	  else if (ipa_fn_summaries != NULL)
	    compute_fn_summary (node, true);
	  /* Dominance info was computed by the passes above; drop it so it
	     is not stale for the next function.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  cgraph_new_nodes.release ();
}
372
373 /* As an GCC extension we allow redefinition of the function. The
374 semantics when both copies of bodies differ is not well defined.
375 We replace the old body with new body so in unit at a time mode
376 we always use new body, while in normal mode we may end up with
377 old body inlined into some functions and new body expanded and
378 inlined in others.
379
380 ??? It may make more sense to use one body for inlining and other
381 body for expanding the function but this is difficult to do. */
382
383 void
384 cgraph_node::reset (void)
385 {
386 /* If process is set, then we have already begun whole-unit analysis.
387 This is *not* testing for whether we've already emitted the function.
388 That case can be sort-of legitimately seen with real function redefinition
389 errors. I would argue that the front end should never present us with
390 such a case, but don't enforce that for now. */
391 gcc_assert (!process);
392
393 /* Reset our data structures so we can analyze the function again. */
394 inlined_to = NULL;
395 memset (&rtl, 0, sizeof (rtl));
396 analyzed = false;
397 definition = false;
398 alias = false;
399 transparent_alias = false;
400 weakref = false;
401 cpp_implicit_alias = false;
402
403 remove_callees ();
404 remove_all_references ();
405 }
406
407 /* Return true when there are references to the node. INCLUDE_SELF is
408 true if a self reference counts as a reference. */
409
410 bool
411 symtab_node::referred_to_p (bool include_self)
412 {
413 ipa_ref *ref = NULL;
414
415 /* See if there are any references at all. */
416 if (iterate_referring (0, ref))
417 return true;
418 /* For functions check also calls. */
419 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
420 if (cn && cn->callers)
421 {
422 if (include_self)
423 return true;
424 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
425 if (e->caller != this)
426 return true;
427 }
428 return false;
429 }
430
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  /* A second finalization means the function is being redefined (the GCC
     extern inline extension); reset previous analysis results.  */
  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      node->reset ();
      node->redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (!flag_toplevel_reorder)
    node->no_reorder = true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* __RTL functions were already output as soon as they were parsed (due
     to the large amount of global state in the backend).
     Mark such functions as "force_output" to reflect the fact that they
     will be in the asm file when considering the symbols they reference.
     The attempt to output them later on will bail out immediately.  */
  if (node->native_rtl_p ())
    node->force_output = 1;

  /* When not optimizing, also output the static functions.  (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
	|| node->no_reorder)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is still being constructed, immediately queue
     symbols already known to be reachable.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
500
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;

  if (dump_file)
    {
      struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
      /* Describe how far the new body has already been lowered.  */
      const char *function_type = ((gimple_has_body_p (fndecl))
				   ? (lowered
				      ? (gimple_in_ssa_p (fn)
					 ? "ssa gimple"
					 : "low gimple")
				      : "high gimple")
				   : "to-be-gimplified");
      fprintf (dump_file,
	       "Added new %s function %s to callgraph\n",
	       function_type,
	       fndecl_name (fndecl));
    }

  /* How much processing is needed depends on the current compilation
     stage; late additions must catch up with work already done.  */
  switch (symtab->state)
    {
    case PARSING:
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local = false;
      node->definition = true;
      node->force_output = true;
      if (TREE_PUBLIC (fndecl))
	node->externally_visible = true;
      if (!lowered && symtab->state == EXPANSION)
	{
	  /* During expansion run lowering and the early local passes
	     directly, since they will not run again.  */
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
605
/* Analyze the function scheduled to be output.  */
void
cgraph_node::analyze (void)
{
  /* __RTL functions were fully processed at parse time; nothing to do.  */
  if (native_rtl_p ())
    {
      analyzed = true;
      return;
    }

  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      cgraph_node *t = cgraph_node::get (thunk.alias);

      create_edge (t, NULL, t->count);
      callees->can_throw_external = !TREE_NOTHROW (t->decl);
      /* Target code in expand_thunk may need the thunk's target
	 to be analyzed, so recurse here.  */
      if (!t->analyzed && t->definition)
	t->analyze ();
      if (t->alias)
	{
	  t = t->get_alias_target ();
	  if (!t->analyzed && t->definition)
	    t->analyze ();
	}
      bool ret = expand_thunk (false, false);
      thunk.alias = NULL;
      /* If the thunk was not expanded, leave it unanalyzed for now.  */
      if (!ret)
	return;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target), transparent_alias);
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* Ordinary function: gimplify and lower the body.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_needed (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
694
695 /* C++ frontend produce same body aliases all over the place, even before PCH
696 gets streamed out. It relies on us linking the aliases with their function
697 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
698 first produce aliases without links, but once C++ FE is sure he won't sream
699 PCH we build the links via this function. */
700
701 void
702 symbol_table::process_same_body_aliases (void)
703 {
704 symtab_node *node;
705 FOR_EACH_SYMBOL (node)
706 if (node->cpp_implicit_alias && !node->analyzed)
707 node->resolve_alias
708 (VAR_P (node->alias_target)
709 ? (symtab_node *)varpool_node::get_create (node->alias_target)
710 : (symtab_node *)cgraph_node::get_create (node->alias_target));
711 cpp_implicit_aliases_done = true;
712 }
713
714 /* Process attributes common for vars and functions. */
715
716 static void
717 process_common_attributes (symtab_node *node, tree decl)
718 {
719 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
720
721 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
722 {
723 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
724 "%<weakref%> attribute should be accompanied with"
725 " an %<alias%> attribute");
726 DECL_WEAK (decl) = 0;
727 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
728 DECL_ATTRIBUTES (decl));
729 }
730
731 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
732 node->no_reorder = 1;
733 }
734
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* Only walk functions added since FIRST; earlier ones were handled by a
     previous invocation.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* weakref/alias on a defined function contradicts the definition;
	 warn and strip the weakref.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
	       && node->definition
	       && !node->alias)
	warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		    "%<alias%> attribute ignored"
		    " because function is defined");

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "%<always_inline%> function might not be inlinable");

      process_common_attributes (node, decl);
    }
  /* Likewise for variables added since FIRST_VAR.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
835
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (!flag_toplevel_reorder)
    node->no_reorder = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reoder.  */
      || (node->no_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  /* What else needs to happen depends on how far compilation progressed
     by the time the variable was finalized.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (node->no_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();
}
874
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	  (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; only walk each list once.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* Zero possible targets means the call is unreachable; redirect
	     it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
956
/* Issue appropriate warnings for the global declaration DECL.  */

static void
check_global_declaration (symtab_node *snode)
{
  const char *decl_file;
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_NO_WARNING (decl)
      && ! TREE_PUBLIC (decl)
      && (warn_unused_function
	  || snode->referred_to_p (/*include_self=*/false)))
    {
      /* A static function that is referenced but never defined is an
	 error (pedwarn); one that is merely declared is only worth
	 -Wunused-function.  */
      if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       /* For -Wunused-const-variable=1 only warn about variables defined
	  in the main compilation unit, not in headers.  */
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || (main_input_filename != NULL
			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
			&& filename_cmp (main_input_filename,
					 decl_file) == 0))))
	   && VAR_P (decl)))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (VAR_P (decl) && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}
1026
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.

   FIRST_ANALYZED and FIRST_ANALYZED_VAR are watermarks recording where a
   previous run of analyze_functions stopped, so that repeated invocations
   (e.g. for intermodule optimization) only process newly added symbols.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
1031
1032 /* FIRST_TIME is set to TRUE for the first time we are called for a
1033 translation unit from finalize_compilation_unit() or false
1034 otherwise. */
1035
1036 static void
1037 analyze_functions (bool first_time)
1038 {
1039 /* Keep track of already processed nodes when called multiple times for
1040 intermodule optimization. */
1041 cgraph_node *first_handled = first_analyzed;
1042 varpool_node *first_handled_var = first_analyzed_var;
1043 hash_set<void *> reachable_call_targets;
1044
1045 symtab_node *node;
1046 symtab_node *next;
1047 int i;
1048 ipa_ref *ref;
1049 bool changed = true;
1050 location_t saved_loc = input_location;
1051
1052 bitmap_obstack_initialize (NULL);
1053 symtab->state = CONSTRUCTION;
1054 input_location = UNKNOWN_LOCATION;
1055
1056 /* Ugly, but the fixup cannot happen at a time same body alias is created;
1057 C++ FE is confused about the COMDAT groups being right. */
1058 if (symtab->cpp_implicit_aliases_done)
1059 FOR_EACH_SYMBOL (node)
1060 if (node->cpp_implicit_alias)
1061 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1062 build_type_inheritance_graph ();
1063
1064 /* Analysis adds static variables that in turn adds references to new functions.
1065 So we need to iterate the process until it stabilize. */
1066 while (changed)
1067 {
1068 changed = false;
1069 process_function_and_variable_attributes (first_analyzed,
1070 first_analyzed_var);
1071
1072 /* First identify the trivially needed symbols. */
1073 for (node = symtab->first_symbol ();
1074 node != first_analyzed
1075 && node != first_analyzed_var; node = node->next)
1076 {
1077 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1078 node->get_comdat_group_id ();
1079 if (node->needed_p ())
1080 {
1081 enqueue_node (node);
1082 if (!changed && symtab->dump_file)
1083 fprintf (symtab->dump_file, "Trivially needed symbols:");
1084 changed = true;
1085 if (symtab->dump_file)
1086 fprintf (symtab->dump_file, " %s", node->asm_name ());
1087 if (!changed && symtab->dump_file)
1088 fprintf (symtab->dump_file, "\n");
1089 }
1090 if (node == first_analyzed
1091 || node == first_analyzed_var)
1092 break;
1093 }
1094 symtab->process_new_functions ();
1095 first_analyzed_var = symtab->first_variable ();
1096 first_analyzed = symtab->first_function ();
1097
1098 if (changed && symtab->dump_file)
1099 fprintf (symtab->dump_file, "\n");
1100
1101 /* Lower representation, build callgraph edges and references for all trivially
1102 needed symbols and all symbols referred by them. */
1103 while (queued_nodes != &symtab_terminator)
1104 {
1105 changed = true;
1106 node = queued_nodes;
1107 queued_nodes = (symtab_node *)queued_nodes->aux;
1108 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1109 if (cnode && cnode->definition)
1110 {
1111 cgraph_edge *edge;
1112 tree decl = cnode->decl;
1113
1114 /* ??? It is possible to create extern inline function
1115 and later using weak alias attribute to kill its body.
1116 See gcc.c-torture/compile/20011119-1.c */
1117 if (!DECL_STRUCT_FUNCTION (decl)
1118 && !cnode->alias
1119 && !cnode->thunk.thunk_p
1120 && !cnode->dispatcher_function)
1121 {
1122 cnode->reset ();
1123 cnode->redefined_extern_inline = true;
1124 continue;
1125 }
1126
1127 if (!cnode->analyzed)
1128 cnode->analyze ();
1129
1130 for (edge = cnode->callees; edge; edge = edge->next_callee)
1131 if (edge->callee->definition
1132 && (!DECL_EXTERNAL (edge->callee->decl)
1133 /* When not optimizing, do not try to analyze extern
1134 inline functions. Doing so is pointless. */
1135 || opt_for_fn (edge->callee->decl, optimize)
1136 /* Weakrefs needs to be preserved. */
1137 || edge->callee->alias
1138 /* always_inline functions are inlined aven at -O0. */
1139 || lookup_attribute
1140 ("always_inline",
1141 DECL_ATTRIBUTES (edge->callee->decl))
1142 /* Multiversioned functions needs the dispatcher to
1143 be produced locally even for extern functions. */
1144 || edge->callee->function_version ()))
1145 enqueue_node (edge->callee);
1146 if (opt_for_fn (cnode->decl, optimize)
1147 && opt_for_fn (cnode->decl, flag_devirtualize))
1148 {
1149 cgraph_edge *next;
1150
1151 for (edge = cnode->indirect_calls; edge; edge = next)
1152 {
1153 next = edge->next_callee;
1154 if (edge->indirect_info->polymorphic)
1155 walk_polymorphic_call_targets (&reachable_call_targets,
1156 edge);
1157 }
1158 }
1159
1160 /* If decl is a clone of an abstract function,
1161 mark that abstract function so that we don't release its body.
1162 The DECL_INITIAL() of that abstract function declaration
1163 will be later needed to output debug info. */
1164 if (DECL_ABSTRACT_ORIGIN (decl))
1165 {
1166 cgraph_node *origin_node
1167 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1168 origin_node->used_as_abstract_origin = true;
1169 }
1170 /* Preserve a functions function context node. It will
1171 later be needed to output debug info. */
1172 if (tree fn = decl_function_context (decl))
1173 {
1174 cgraph_node *origin_node = cgraph_node::get_create (fn);
1175 enqueue_node (origin_node);
1176 }
1177 }
1178 else
1179 {
1180 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1181 if (vnode && vnode->definition && !vnode->analyzed)
1182 vnode->analyze ();
1183 }
1184
1185 if (node->same_comdat_group)
1186 {
1187 symtab_node *next;
1188 for (next = node->same_comdat_group;
1189 next != node;
1190 next = next->same_comdat_group)
1191 if (!next->comdat_local_p ())
1192 enqueue_node (next);
1193 }
1194 for (i = 0; node->iterate_reference (i, ref); i++)
1195 if (ref->referred->definition
1196 && (!DECL_EXTERNAL (ref->referred->decl)
1197 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1198 && optimize)
1199 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1200 && opt_for_fn (ref->referred->decl, optimize))
1201 || node->alias
1202 || ref->referred->alias)))
1203 enqueue_node (ref->referred);
1204 symtab->process_new_functions ();
1205 }
1206 }
1207 update_type_inheritance_graph ();
1208
1209 /* Collect entry points to the unit. */
1210 if (symtab->dump_file)
1211 {
1212 fprintf (symtab->dump_file, "\n\nInitial ");
1213 symtab->dump (symtab->dump_file);
1214 }
1215
1216 if (first_time)
1217 {
1218 symtab_node *snode;
1219 FOR_EACH_SYMBOL (snode)
1220 check_global_declaration (snode);
1221 }
1222
1223 if (symtab->dump_file)
1224 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1225
1226 for (node = symtab->first_symbol ();
1227 node != first_handled
1228 && node != first_handled_var; node = next)
1229 {
1230 next = node->next;
1231 /* For symbols declared locally we clear TREE_READONLY when emitting
1232 the construtor (if one is needed). For external declarations we can
1233 not safely assume that the type is readonly because we may be called
1234 during its construction. */
1235 if (TREE_CODE (node->decl) == VAR_DECL
1236 && TYPE_P (TREE_TYPE (node->decl))
1237 && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
1238 && DECL_EXTERNAL (node->decl))
1239 TREE_READONLY (node->decl) = 0;
1240 if (!node->aux && !node->referred_to_p ())
1241 {
1242 if (symtab->dump_file)
1243 fprintf (symtab->dump_file, " %s", node->name ());
1244
1245 /* See if the debugger can use anything before the DECL
1246 passes away. Perhaps it can notice a DECL that is now a
1247 constant and can tag the early DIE with an appropriate
1248 attribute.
1249
1250 Otherwise, this is the last chance the debug_hooks have
1251 at looking at optimized away DECLs, since
1252 late_global_decl will subsequently be called from the
1253 contents of the now pruned symbol table. */
1254 if (VAR_P (node->decl)
1255 && !decl_function_context (node->decl))
1256 {
1257 /* We are reclaiming totally unreachable code and variables
1258 so they effectively appear as readonly. Show that to
1259 the debug machinery. */
1260 TREE_READONLY (node->decl) = 1;
1261 node->definition = false;
1262 (*debug_hooks->late_global_decl) (node->decl);
1263 }
1264
1265 node->remove ();
1266 continue;
1267 }
1268 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1269 {
1270 tree decl = node->decl;
1271
1272 if (cnode->definition && !gimple_has_body_p (decl)
1273 && !cnode->alias
1274 && !cnode->thunk.thunk_p)
1275 cnode->reset ();
1276
1277 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1278 || cnode->alias
1279 || gimple_has_body_p (decl)
1280 || cnode->native_rtl_p ());
1281 gcc_assert (cnode->analyzed == cnode->definition);
1282 }
1283 node->aux = NULL;
1284 }
1285 for (;node; node = node->next)
1286 node->aux = NULL;
1287 first_analyzed = symtab->first_function ();
1288 first_analyzed_var = symtab->first_variable ();
1289 if (symtab->dump_file)
1290 {
1291 fprintf (symtab->dump_file, "\n\nReclaimed ");
1292 symtab->dump (symtab->dump_file);
1293 }
1294 bitmap_obstack_release (NULL);
1295 ggc_collect ();
1296 /* Initialize assembler name hash, in particular we want to trigger C++
1297 mangling and same body alias creation before we free DECL_ARGUMENTS
1298 used by it. */
1299 if (!seen_error ())
1300 symtab->symtab_initialize_asm_name_hash ();
1301
1302 input_location = saved_loc;
1303 }
1304
1305 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1306 (which may be an ifunc resolver) and issue a diagnostic when they are
1307 not compatible according to language rules (plus a C++ extension for
1308 non-static member functions). */
1309
1310 static void
1311 maybe_diag_incompatible_alias (tree alias, tree target)
1312 {
1313 tree altype = TREE_TYPE (alias);
1314 tree targtype = TREE_TYPE (target);
1315
1316 bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1317 tree funcptr = altype;
1318
1319 if (ifunc)
1320 {
1321 /* Handle attribute ifunc first. */
1322 if (TREE_CODE (altype) == METHOD_TYPE)
1323 {
1324 /* Set FUNCPTR to the type of the alias target. If the type
1325 is a non-static member function of class C, construct a type
1326 of an ordinary function taking C* as the first argument,
1327 followed by the member function argument list, and use it
1328 instead to check for incompatibility. This conversion is
1329 not defined by the language but an extension provided by
1330 G++. */
1331
1332 tree rettype = TREE_TYPE (altype);
1333 tree args = TYPE_ARG_TYPES (altype);
1334 altype = build_function_type (rettype, args);
1335 funcptr = altype;
1336 }
1337
1338 targtype = TREE_TYPE (targtype);
1339
1340 if (POINTER_TYPE_P (targtype))
1341 {
1342 targtype = TREE_TYPE (targtype);
1343
1344 /* Only issue Wattribute-alias for conversions to void* with
1345 -Wextra. */
1346 if (VOID_TYPE_P (targtype) && !extra_warnings)
1347 return;
1348
1349 /* Proceed to handle incompatible ifunc resolvers below. */
1350 }
1351 else
1352 {
1353 funcptr = build_pointer_type (funcptr);
1354
1355 error_at (DECL_SOURCE_LOCATION (target),
1356 "%<ifunc%> resolver for %qD must return %qT",
1357 alias, funcptr);
1358 inform (DECL_SOURCE_LOCATION (alias),
1359 "resolver indirect function declared here");
1360 return;
1361 }
1362 }
1363
1364 if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1365 || (prototype_p (altype)
1366 && prototype_p (targtype)
1367 && !types_compatible_p (altype, targtype))))
1368 {
1369 /* Warn for incompatibilities. Avoid warning for functions
1370 without a prototype to make it possible to declare aliases
1371 without knowing the exact type, as libstdc++ does. */
1372 if (ifunc)
1373 {
1374 funcptr = build_pointer_type (funcptr);
1375
1376 auto_diagnostic_group d;
1377 if (warning_at (DECL_SOURCE_LOCATION (target),
1378 OPT_Wattribute_alias_,
1379 "%<ifunc%> resolver for %qD should return %qT",
1380 alias, funcptr))
1381 inform (DECL_SOURCE_LOCATION (alias),
1382 "resolver indirect function declared here");
1383 }
1384 else
1385 {
1386 auto_diagnostic_group d;
1387 if (warning_at (DECL_SOURCE_LOCATION (alias),
1388 OPT_Wattribute_alias_,
1389 "%qD alias between functions of incompatible "
1390 "types %qT and %qT", alias, altype, targtype))
1391 inform (DECL_SOURCE_LOCATION (target),
1392 "aliased declaration here");
1393 }
1394 }
1395 }
1396
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Note that I is never incremented: every path through the loop body
     removes entry I with unordered_remove, which swaps the last element
     into slot I, so the next iteration examines a fresh pair.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	      node->transparent_alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol not defined anywhere in this
	     unit: report and drop the alias flag.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  /* Not fatal: fall through and create the alias anyway.  */
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* Function aliasing a function: diagnose type/attribute
	     mismatches, then record the alias in the callgraph.  */
	  maybe_diag_incompatible_alias (p->decl, target_node->decl);

	  maybe_diag_alias_attributes (p->decl, target_node->decl);

	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  /* Any previously parsed body of the alias is superseded by
	     the alias target; drop it.  */
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (VAR_P (p->decl)
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  /* Variable aliasing a variable.  */
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable pairs are not supported.  */
	  error ("%q+D alias between function and variable is not supported",
		 p->decl);
	  inform (DECL_SOURCE_LOCATION (target_node->decl),
		  "aliased declaration here");

	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1481
1482
/* Figure out what functions we want to assemble, i.e. set NODE->process
   on every function that must be emitted by this compilation.  With
   checking enabled, also verify that everything not marked has had its
   body reclaimed.  */

static void
mark_functions_to_output (void)
{
  bool check_same_comdat_groups = false;
  cgraph_node *node;

  /* Nothing should have been marked for output yet.  */
  if (flag_checking)
    FOR_EACH_FUNCTION (node)
      gcc_assert (!node->process);

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* Only COMDAT group members may already be marked, by the group
	 propagation loop below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a COMDAT group drags in the rest of
	     the group, except thunks, aliases and comdat-local members.  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Defer the reclamation check for unmarked COMDAT members
	     until all marking is finished (second loop below).  */
	  if (flag_checking)
	    check_same_comdat_groups = true;
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
	  if (flag_checking
	      && !node->inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
	  gcc_assert (node->inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
  if (flag_checking && check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  /* An unmarked COMDAT member must not still own a GIMPLE body
	     (modulo the ltrans caveat noted above).  */
	  if (!node->inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
}
1578
1579 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1580 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1581
1582 Set current_function_decl and cfun to newly constructed empty function body.
1583 return basic block in the function body. */
1584
1585 basic_block
1586 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1587 {
1588 basic_block bb;
1589 edge e;
1590
1591 current_function_decl = decl;
1592 allocate_struct_function (decl, false);
1593 gimple_register_cfg_hooks ();
1594 init_empty_tree_cfg ();
1595 init_tree_ssa (cfun);
1596
1597 if (in_ssa)
1598 {
1599 init_ssa_operands (cfun);
1600 cfun->gimple_df->in_ssa_p = true;
1601 cfun->curr_properties |= PROP_ssa;
1602 }
1603
1604 DECL_INITIAL (decl) = make_node (BLOCK);
1605 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1606
1607 DECL_SAVED_TREE (decl) = error_mark_node;
1608 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1609 | PROP_cfg | PROP_loops);
1610
1611 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1612 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1613 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1614
1615 /* Create BB for body of the function and connect it properly. */
1616 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1617 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1618 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1619 bb->count = count;
1620 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1621 e->probability = profile_probability::always ();
1622 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1623 e->probability = profile_probability::always ();
1624 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1625
1626 return bb;
1627 }
1628
1629 /* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
1630 VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
1631 it is non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and zero
1632 for a result adjusting thunk. */
1633
1634 tree
1635 thunk_adjust (gimple_stmt_iterator * bsi,
1636 tree ptr, bool this_adjusting,
1637 HOST_WIDE_INT fixed_offset, tree virtual_offset,
1638 HOST_WIDE_INT indirect_offset)
1639 {
1640 gassign *stmt;
1641 tree ret;
1642
1643 if (this_adjusting
1644 && fixed_offset != 0)
1645 {
1646 stmt = gimple_build_assign
1647 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1648 ptr,
1649 fixed_offset));
1650 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1651 }
1652
1653 if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
1654 {
1655 tree vfunc_type = make_node (FUNCTION_TYPE);
1656 TREE_TYPE (vfunc_type) = integer_type_node;
1657 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1658 layout_type (vfunc_type);
1659
1660 vtable_entry_type = build_pointer_type (vfunc_type);
1661 }
1662
1663 /* If there's a virtual offset, look up that value in the vtable and
1664 adjust the pointer again. */
1665 if (virtual_offset)
1666 {
1667 tree vtabletmp;
1668 tree vtabletmp2;
1669 tree vtabletmp3;
1670
1671 vtabletmp =
1672 create_tmp_reg (build_pointer_type
1673 (build_pointer_type (vtable_entry_type)), "vptr");
1674
1675 /* The vptr is always at offset zero in the object. */
1676 stmt = gimple_build_assign (vtabletmp,
1677 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1678 ptr));
1679 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1680
1681 /* Form the vtable address. */
1682 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1683 "vtableaddr");
1684 stmt = gimple_build_assign (vtabletmp2,
1685 build_simple_mem_ref (vtabletmp));
1686 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1687
1688 /* Find the entry with the vcall offset. */
1689 stmt = gimple_build_assign (vtabletmp2,
1690 fold_build_pointer_plus_loc (input_location,
1691 vtabletmp2,
1692 virtual_offset));
1693 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1694
1695 /* Get the offset itself. */
1696 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1697 "vcalloffset");
1698 stmt = gimple_build_assign (vtabletmp3,
1699 build_simple_mem_ref (vtabletmp2));
1700 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1701
1702 /* Adjust the `this' pointer. */
1703 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1704 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1705 GSI_CONTINUE_LINKING);
1706 }
1707
1708 /* Likewise for an offset that is stored in the object that contains the
1709 vtable. */
1710 if (indirect_offset != 0)
1711 {
1712 tree offset_ptr, offset_tree;
1713
1714 /* Get the address of the offset. */
1715 offset_ptr
1716 = create_tmp_reg (build_pointer_type
1717 (build_pointer_type (vtable_entry_type)),
1718 "offset_ptr");
1719 stmt = gimple_build_assign (offset_ptr,
1720 build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
1721 ptr));
1722 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1723
1724 stmt = gimple_build_assign
1725 (offset_ptr,
1726 fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
1727 indirect_offset));
1728 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1729
1730 /* Get the offset itself. */
1731 offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
1732 "offset");
1733 stmt = gimple_build_assign (offset_tree,
1734 build_simple_mem_ref (offset_ptr));
1735 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1736
1737 /* Adjust the `this' pointer. */
1738 ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
1739 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1740 GSI_CONTINUE_LINKING);
1741 }
1742
1743 if (!this_adjusting
1744 && fixed_offset != 0)
1745 /* Adjust the pointer by the constant. */
1746 {
1747 tree ptrtmp;
1748
1749 if (VAR_P (ptr))
1750 ptrtmp = ptr;
1751 else
1752 {
1753 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1754 stmt = gimple_build_assign (ptrtmp, ptr);
1755 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1756 }
1757 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1758 ptrtmp, fixed_offset);
1759 }
1760
1761 /* Emit the statement and gimplify the adjustment expression. */
1762 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1763 stmt = gimple_build_assign (ret, ptr);
1764 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1765
1766 return ret;
1767 }
1768
1769 /* Expand thunk NODE to gimple if possible.
1770 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1771 no assembler is produced.
1772 When OUTPUT_ASM_THUNK is true, also produce assembler for
1773 thunks that are not lowered. */
1774
1775 bool
1776 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1777 {
1778 bool this_adjusting = thunk.this_adjusting;
1779 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1780 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1781 HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1782 tree virtual_offset = NULL;
1783 tree alias = callees->callee->decl;
1784 tree thunk_fndecl = decl;
1785 tree a;
1786
1787 if (!force_gimple_thunk
1788 && this_adjusting
1789 && indirect_offset == 0
1790 && !DECL_EXTERNAL (alias)
1791 && !DECL_STATIC_CHAIN (alias)
1792 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1793 virtual_value, alias))
1794 {
1795 tree fn_block;
1796 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1797
1798 if (!output_asm_thunks)
1799 {
1800 analyzed = true;
1801 return false;
1802 }
1803
1804 if (in_lto_p)
1805 get_untransformed_body ();
1806 a = DECL_ARGUMENTS (thunk_fndecl);
1807
1808 current_function_decl = thunk_fndecl;
1809
1810 /* Ensure thunks are emitted in their correct sections. */
1811 resolve_unique_section (thunk_fndecl, 0,
1812 flag_function_sections);
1813
1814 DECL_RESULT (thunk_fndecl)
1815 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1816 RESULT_DECL, 0, restype);
1817 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1818
1819 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1820 create one. */
1821 fn_block = make_node (BLOCK);
1822 BLOCK_VARS (fn_block) = a;
1823 DECL_INITIAL (thunk_fndecl) = fn_block;
1824 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1825 allocate_struct_function (thunk_fndecl, false);
1826 init_function_start (thunk_fndecl);
1827 cfun->is_thunk = 1;
1828 insn_locations_init ();
1829 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1830 prologue_location = curr_insn_location ();
1831
1832 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1833 fixed_offset, virtual_value, alias);
1834
1835 insn_locations_finalize ();
1836 init_insn_lengths ();
1837 free_after_compilation (cfun);
1838 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1839 thunk.thunk_p = false;
1840 analyzed = false;
1841 }
1842 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1843 {
1844 error ("generic thunk code fails for method %qD which uses %<...%>",
1845 thunk_fndecl);
1846 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1847 analyzed = true;
1848 return false;
1849 }
1850 else
1851 {
1852 tree restype;
1853 basic_block bb, then_bb, else_bb, return_bb;
1854 gimple_stmt_iterator bsi;
1855 int nargs = 0;
1856 tree arg;
1857 int i;
1858 tree resdecl;
1859 tree restmp = NULL;
1860
1861 gcall *call;
1862 greturn *ret;
1863 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1864
1865 /* We may be called from expand_thunk that releses body except for
1866 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1867 if (in_lto_p && !force_gimple_thunk)
1868 get_untransformed_body ();
1869
1870 /* We need to force DECL_IGNORED_P when the thunk is created
1871 after early debug was run. */
1872 if (force_gimple_thunk)
1873 DECL_IGNORED_P (thunk_fndecl) = 1;
1874
1875 a = DECL_ARGUMENTS (thunk_fndecl);
1876
1877 current_function_decl = thunk_fndecl;
1878
1879 /* Ensure thunks are emitted in their correct sections. */
1880 resolve_unique_section (thunk_fndecl, 0,
1881 flag_function_sections);
1882
1883 bitmap_obstack_initialize (NULL);
1884
1885 if (thunk.virtual_offset_p)
1886 virtual_offset = size_int (virtual_value);
1887
1888 /* Build the return declaration for the function. */
1889 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1890 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1891 {
1892 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1893 DECL_ARTIFICIAL (resdecl) = 1;
1894 DECL_IGNORED_P (resdecl) = 1;
1895 DECL_CONTEXT (resdecl) = thunk_fndecl;
1896 DECL_RESULT (thunk_fndecl) = resdecl;
1897 }
1898 else
1899 resdecl = DECL_RESULT (thunk_fndecl);
1900
1901 profile_count cfg_count = count;
1902 if (!cfg_count.initialized_p ())
1903 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
1904
1905 bb = then_bb = else_bb = return_bb
1906 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
1907
1908 bsi = gsi_start_bb (bb);
1909
1910 /* Build call to the function being thunked. */
1911 if (!VOID_TYPE_P (restype)
1912 && (!alias_is_noreturn
1913 || TREE_ADDRESSABLE (restype)
1914 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1915 {
1916 if (DECL_BY_REFERENCE (resdecl))
1917 {
1918 restmp = gimple_fold_indirect_ref (resdecl);
1919 if (!restmp)
1920 restmp = build2 (MEM_REF,
1921 TREE_TYPE (TREE_TYPE (resdecl)),
1922 resdecl,
1923 build_int_cst (TREE_TYPE (resdecl), 0));
1924 }
1925 else if (!is_gimple_reg_type (restype))
1926 {
1927 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1928 {
1929 restmp = resdecl;
1930
1931 if (VAR_P (restmp))
1932 {
1933 add_local_decl (cfun, restmp);
1934 BLOCK_VARS (DECL_INITIAL (current_function_decl))
1935 = restmp;
1936 }
1937 }
1938 else
1939 restmp = create_tmp_var (restype, "retval");
1940 }
1941 else
1942 restmp = create_tmp_reg (restype, "retval");
1943 }
1944
1945 for (arg = a; arg; arg = DECL_CHAIN (arg))
1946 nargs++;
1947 auto_vec<tree> vargs (nargs);
1948 i = 0;
1949 arg = a;
1950 if (this_adjusting)
1951 {
1952 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1953 virtual_offset, indirect_offset));
1954 arg = DECL_CHAIN (a);
1955 i = 1;
1956 }
1957
1958 if (nargs)
1959 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1960 {
1961 tree tmp = arg;
1962 if (VECTOR_TYPE_P (TREE_TYPE (arg))
1963 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1964 DECL_GIMPLE_REG_P (arg) = 1;
1965
1966 if (!is_gimple_val (arg))
1967 {
1968 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1969 (TREE_TYPE (arg)), "arg");
1970 gimple *stmt = gimple_build_assign (tmp, arg);
1971 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1972 }
1973 vargs.quick_push (tmp);
1974 }
1975 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1976 callees->call_stmt = call;
1977 gimple_call_set_from_thunk (call, true);
1978 if (DECL_STATIC_CHAIN (alias))
1979 {
1980 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
1981 tree type = TREE_TYPE (p);
1982 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1983 PARM_DECL, create_tmp_var_name ("CHAIN"),
1984 type);
1985 DECL_ARTIFICIAL (decl) = 1;
1986 DECL_IGNORED_P (decl) = 1;
1987 TREE_USED (decl) = 1;
1988 DECL_CONTEXT (decl) = thunk_fndecl;
1989 DECL_ARG_TYPE (decl) = type;
1990 TREE_READONLY (decl) = 1;
1991
1992 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
1993 sf->static_chain_decl = decl;
1994
1995 gimple_call_set_chain (call, decl);
1996 }
1997
  /* Return slot optimization is always possible and in fact required to
     return values with DECL_BY_REFERENCE.  */
2000 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2001 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2002 || DECL_BY_REFERENCE (resdecl)))
2003 gimple_call_set_return_slot_opt (call, true);
2004
2005 if (restmp)
2006 {
2007 gimple_call_set_lhs (call, restmp);
2008 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2009 TREE_TYPE (TREE_TYPE (alias))));
2010 }
2011 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2012 if (!alias_is_noreturn)
2013 {
2014 if (restmp && !this_adjusting
2015 && (fixed_offset || virtual_offset))
2016 {
2017 tree true_label = NULL_TREE;
2018
2019 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2020 {
2021 gimple *stmt;
2022 edge e;
2023 /* If the return type is a pointer, we need to
2024 protect against NULL. We know there will be an
2025 adjustment, because that's why we're emitting a
2026 thunk. */
2027 then_bb = create_basic_block (NULL, bb);
2028 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2029 return_bb = create_basic_block (NULL, then_bb);
2030 return_bb->count = cfg_count;
2031 else_bb = create_basic_block (NULL, else_bb);
2032 else_bb->count = cfg_count.apply_scale (1, 16);
2033 add_bb_to_loop (then_bb, bb->loop_father);
2034 add_bb_to_loop (return_bb, bb->loop_father);
2035 add_bb_to_loop (else_bb, bb->loop_father);
2036 remove_edge (single_succ_edge (bb));
2037 true_label = gimple_block_label (then_bb);
2038 stmt = gimple_build_cond (NE_EXPR, restmp,
2039 build_zero_cst (TREE_TYPE (restmp)),
2040 NULL_TREE, NULL_TREE);
2041 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2042 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2043 e->probability = profile_probability::guessed_always ()
2044 .apply_scale (1, 16);
2045 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2046 e->probability = profile_probability::guessed_always ()
2047 .apply_scale (1, 16);
2048 make_single_succ_edge (return_bb,
2049 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2050 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2051 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2052 e->probability = profile_probability::always ();
2053 bsi = gsi_last_bb (then_bb);
2054 }
2055
2056 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2057 fixed_offset, virtual_offset,
2058 indirect_offset);
2059 if (true_label)
2060 {
2061 gimple *stmt;
2062 bsi = gsi_last_bb (else_bb);
2063 stmt = gimple_build_assign (restmp,
2064 build_zero_cst (TREE_TYPE (restmp)));
2065 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2066 bsi = gsi_last_bb (return_bb);
2067 }
2068 }
2069 else
2070 gimple_call_set_tail (call, true);
2071
2072 /* Build return value. */
2073 if (!DECL_BY_REFERENCE (resdecl))
2074 ret = gimple_build_return (restmp);
2075 else
2076 ret = gimple_build_return (resdecl);
2077
2078 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2079 }
2080 else
2081 {
2082 gimple_call_set_tail (call, true);
2083 remove_edge (single_succ_edge (bb));
2084 }
2085
2086 cfun->gimple_df->in_ssa_p = true;
2087 update_max_bb_count ();
2088 profile_status_for_fn (cfun)
2089 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2090 ? PROFILE_READ : PROFILE_GUESSED;
2091 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2092 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2093 delete_unreachable_blocks ();
2094 update_ssa (TODO_update_ssa);
2095 checking_verify_flow_info ();
2096 free_dominance_info (CDI_DOMINATORS);
2097
2098 /* Since we want to emit the thunk, we explicitly mark its name as
2099 referenced. */
2100 thunk.thunk_p = false;
2101 lowered = true;
2102 bitmap_obstack_release (NULL);
2103 }
2104 current_function_decl = NULL;
2105 set_cfun (NULL);
2106 return true;
2107 }
2108
/* Assemble thunks and aliases associated to node.  Thunks appear as
   caller edges with thunk.thunk_p set; aliases are reached through the
   reference list.  Both are handled recursively so thunks of thunks and
   aliases of aliases get emitted as well.  */

void
cgraph_node::assemble_thunks_and_aliases (void)
{
  cgraph_edge *e;
  ipa_ref *ref;

  /* Walk all callers; a thunk shows up as a calling edge whose caller
     has thunk.thunk_p set.  Thunks inlined elsewhere are skipped.  */
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p
        && !e->caller->inlined_to)
      {
        cgraph_node *thunk = e->caller;

        /* Advance E before expanding THUNK — expansion presumably can
           invalidate the current edge (TODO confirm).  */
        e = e->next_caller;
        thunk->expand_thunk (true, false);
        thunk->assemble_thunks_and_aliases ();
      }
    else
      e = e->next_caller;

  FOR_EACH_ALIAS (this, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if (!alias->transparent_alias)
        {
          bool saved_written = TREE_ASM_WRITTEN (decl);

          /* Force assemble_alias to really output the alias this time instead
             of buffering it in same alias pairs.  */
          TREE_ASM_WRITTEN (decl) = 1;
          do_assemble_alias (alias->decl,
                             DECL_ASSEMBLER_NAME (decl));
          alias->assemble_thunks_and_aliases ();
          TREE_ASM_WRITTEN (decl) = saved_written;
        }
    }
}
2147
/* Expand function specified by node.  Runs the remaining (non-IPA) pass
   pipeline on the function body, emits its assembly together with
   associated thunks and aliases, then releases the GIMPLE body and the
   call edges that would otherwise point to dead statements.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!inlined_to);

  /* __RTL functions are compiled as soon as they are parsed, so don't
     do it again.  */
  if (native_rtl_p ())
    return;

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  /* Make sure the GIMPLE body is actually in memory before running
     passes over it.  */
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  gcc_assert (DECL_STRUCT_FUNCTION (decl));
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply any queued IPA transforms before local optimization.  */
  execute_all_ipa_transforms (false);

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
          && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
                               warn_larger_than_size) > 0)
        {
          unsigned int size_as_int
            = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

          /* Print the exact size only when it fits in an unsigned int;
             otherwise fall back to the "larger than" wording.  */
          if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
            warning (OPT_Wlarger_than_,
                     "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
          else
            warning (OPT_Wlarger_than_,
                     "size of return value of %q+D is larger than %wu bytes",
                     decl, warn_larger_than_size);
        }
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
         But DECL_INITIAL must remain nonzero so we know this
         was an actual function definition.
         For a nested function, this is done in c_pop_function_context.
         If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
        DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  if (cfun)
    pop_cfun ();

  /* It would make a lot more sense to output thunks before function body to
     get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
2268
2269 /* Node comparer that is responsible for the order that corresponds
2270 to time when a function was launched for the first time. */
2271
2272 static int
2273 node_cmp (const void *pa, const void *pb)
2274 {
2275 const cgraph_node *a = *(const cgraph_node * const *) pa;
2276 const cgraph_node *b = *(const cgraph_node * const *) pb;
2277
2278 /* Functions with time profile must be before these without profile. */
2279 if (!a->tp_first_run || !b->tp_first_run)
2280 return a->tp_first_run - b->tp_first_run;
2281
2282 return a->tp_first_run != b->tp_first_run
2283 ? b->tp_first_run - a->tp_first_run
2284 : b->order - a->order;
2285 }
2286
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
expand_all_functions (void)
{
  cgraph_node *node;
  cgraph_node **order = XCNEWVEC (cgraph_node *,
                                  symtab->cgraph_count);
  /* Counters used only for the dump-file statistics below.  */
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == symtab->cgraph_count);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* With -fprofile-reorder-functions, order by time-profile first-run
     instead of plain postorder (see node_cmp).  */
  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);

  /* The array is in reverse postorder; walk it backwards so callees
     tend to be expanded before their callers.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
        {
          expanded_func_count++;
          if(node->tp_first_run)
            profiled_func_count++;

          if (symtab->dump_file)
            fprintf (symtab->dump_file,
                     "Time profile order in expand_all_functions:%s:%d\n",
                     node->asm_name (), node->tp_first_run);
          node->process = 0;
          node->expand ();
        }
    }

  if (dump_file)
    fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
             main_input_filename, profiled_func_count, expanded_func_count);

  if (symtab->dump_file && flag_profile_reorder_functions)
    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
             profiled_func_count, expanded_func_count);

  /* Handle any functions added to the symbol table while expanding.  */
  symtab->process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
2351
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,  /* Slot not used by any symbol.  */
  ORDER_FUNCTION,       /* Slot holds a cgraph_node (u.f).  */
  ORDER_VAR,            /* Slot holds a defined varpool_node (u.v).  */
  ORDER_VAR_UNDEF,      /* Slot holds an undefined varpool_node (u.v).  */
  ORDER_ASM             /* Slot holds a toplevel asm_node (u.a).  */
};
2362
/* One slot of the output-ordering table built by output_in_order; the
   KIND tag selects which union member is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;     /* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;    /* Valid when kind is ORDER_VAR or ORDER_VAR_UNDEF.  */
    asm_node *a;        /* Valid when kind == ORDER_ASM.  */
  } u;
};
2373
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
output_in_order (void)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* Order numbers index a table large enough for every symbol; the
     asserts below check each slot is claimed at most once.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  /* Collect functions that must be emitted in source order.  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
        {
          if (!pf->no_reorder)
            continue;
          i = pf->order;
          gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
          nodes[i].kind = ORDER_FUNCTION;
          nodes[i].u.f = pf;
        }
    }

  /* There is a similar loop in symbol_table::output_variables.
     Please keep them in sync.  */
  FOR_EACH_VARIABLE (pv)
    {
      if (!pv->no_reorder)
        continue;
      /* Hard-register and value-expr variables get no storage.  */
      if (DECL_HARD_REGISTER (pv->decl)
          || DECL_HAS_VALUE_EXPR_P (pv->decl))
        continue;
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
      nodes[i].u.v = pv;
    }

  /* Toplevel asm statements are always emitted in source order.  */
  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit everything in order-number order; empty (ORDER_UNDEFINED)
     slots are skipped.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
        {
        case ORDER_FUNCTION:
          nodes[i].u.f->process = 0;
          nodes[i].u.f->expand ();
          break;

        case ORDER_VAR:
          nodes[i].u.v->assemble_decl ();
          break;

        case ORDER_VAR_UNDEF:
          assemble_undefined_decl (nodes[i].u.v->decl);
          break;

        case ORDER_ASM:
          assemble_asm (nodes[i].u.a->asm_str);
          break;

        case ORDER_UNDEFINED:
          break;

        default:
          gcc_unreachable ();
        }
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2467
/* Run the inter-procedural pass pipeline: the small IPA passes, IPA
   summary generation, LTO/offload streaming when requested, and
   finally the regular IPA passes.  Runs with no function context.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes operate on the whole program, not a single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips the bitmap_obstack_release
         call at the end of the function — confirm that is intended.  */
      if (seen_error ())
        return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterates.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
        ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p
      || flag_incremental_link == INCREMENTAL_LINK_LTO)
    {
      if (!quiet_flag)
        fprintf (stderr, "Streaming LTO\n");
      /* Offload summaries are streamed first, into sections with the
         offload prefix, then the host summaries with the LTO prefix.  */
      if (g->have_offload)
        {
          section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
          lto_stream_offload_p = true;
          ipa_write_summaries ();
          lto_stream_offload_p = false;
        }
      if (flag_lto)
        {
          section_name_prefix = LTO_SECTION_NAME_PREFIX;
          lto_stream_offload_p = false;
          ipa_write_summaries ();
        }
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes unless this is the write-only phase of
     a non-fat LTO compilation.  */
  if (!flag_ltrans
      && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
          || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2545
2546
2547 /* Return string alias is alias of. */
2548
2549 static tree
2550 get_alias_symbol (tree decl)
2551 {
2552 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2553 return get_identifier (TREE_STRING_POINTER
2554 (TREE_VALUE (TREE_VALUE (alias))));
2555 }
2556
2557
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    if (node->alias
        && !TREE_ASM_WRITTEN (node->decl)
        && node->weakref)
      {
        tree target;

        /* Weakrefs are special by not requiring target definition in current
           compilation unit.  It is thus bit hard to work out what we want to
           alias.
           When alias target is defined, we need to fetch it from symtab
           reference, otherwise it is pointed to by alias_target.  */
        if (node->alias_target)
          target = (DECL_P (node->alias_target)
                    ? DECL_ASSEMBLER_NAME (node->alias_target)
                    : node->alias_target);
        else if (node->analyzed)
          target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
        else
          {
            /* No alias_target and not analyzed: must not happen.  The
               assignment below is dead code kept after the assertion.  */
            gcc_unreachable ();
            target = get_alias_symbol (node->decl);
          }
        do_assemble_alias (node->decl, target);
      }
}
2591
/* Perform simple optimizations based on callgraph.  Drives the whole
   post-parsing pipeline: IPA passes, LTO streaming, late IPA passes,
   and expansion of every function that must be output.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

  symtab_node::checking_verify_symtab_nodes ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    dump_memory_report ("Memory consumption before IPA");
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    {
      timevar_start (TV_CGRAPH_IPA_PASSES);
      ipa_passes ();
      timevar_stop (TV_CGRAPH_IPA_PASSES);
    }
  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO.  */
  if (seen_error ()
      || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
          && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab->dump (dump_file);
    }
  if (post_ipa_mem_report)
    dump_memory_report ("Memory consumption after IPA");
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  switch_to_section (text_section);
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
  symtab_node::checking_verify_symtab_nodes ();

  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
        && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
        IDENTIFIER_TRANSPARENT_ALIAS
           (DECL_ASSEMBLER_NAME (node->decl)) = 1;
        TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
           = (node->alias_target ? node->alias_target
              : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
     }
#endif

  state = EXPANSION;

  /* Output first asm statements and anything ordered.  The process
     flag is cleared for these nodes, so we skip them later.  */
  output_in_order ();

  timevar_start (TV_CGRAPH_FUNC_EXPANSION);
  expand_all_functions ();
  timevar_stop (TV_CGRAPH_FUNC_EXPANSION);

  output_variables ();

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab->dump (dump_file);
    }
  if (!flag_checking)
    return;
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
        if (node->inlined_to
            || gimple_has_body_p (node->decl))
          {
            error_found = true;
            node->debug ();
          }
      if (error_found)
        internal_error ("nodes with unreleased memory found");
    }
}
2719
/* Earlydebug dump file, flags, and number.  The number is registered
   in debuginfo_early_init; the file is opened in debuginfo_init.  */

static int debuginfo_early_dump_nr;
static FILE *debuginfo_early_dump_file;
static dump_flags_t debuginfo_early_dump_flags;

/* Debug dump file, flags, and number.  Same lifecycle as above.  */

static int debuginfo_dump_nr;
static FILE *debuginfo_dump_file;
static dump_flags_t debuginfo_dump_flags;
2731
2732 /* Register the debug and earlydebug dump files. */
2733
2734 void
2735 debuginfo_early_init (void)
2736 {
2737 gcc::dump_manager *dumps = g->get_dumps ();
2738 debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2739 "earlydebug", DK_tree,
2740 OPTGROUP_NONE,
2741 false);
2742 debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2743 "debug", DK_tree,
2744 OPTGROUP_NONE,
2745 false);
2746 }
2747
2748 /* Initialize the debug and earlydebug dump files. */
2749
2750 void
2751 debuginfo_init (void)
2752 {
2753 gcc::dump_manager *dumps = g->get_dumps ();
2754 debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2755 debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2756 debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2757 debuginfo_early_dump_flags
2758 = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2759 }
2760
2761 /* Finalize the debug and earlydebug dump files. */
2762
2763 void
2764 debuginfo_fini (void)
2765 {
2766 if (debuginfo_dump_file)
2767 dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2768 if (debuginfo_early_dump_file)
2769 dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2770 }
2771
/* Set dump_file to the debug dump file; undone by debuginfo_stop.  */

void
debuginfo_start (void)
{
  set_dump_file (debuginfo_dump_file);
}
2779
/* Undo setting dump_file to the debug dump file (see debuginfo_start).  */

void
debuginfo_stop (void)
{
  set_dump_file (NULL);
}
2787
/* Set dump_file to the earlydebug dump file; undone by
   debuginfo_early_stop.  */

void
debuginfo_early_start (void)
{
  set_dump_file (debuginfo_early_dump_file);
}
2795
/* Undo setting dump_file to the earlydebug dump file (see
   debuginfo_early_start).  */

void
debuginfo_early_stop (void)
{
  set_dump_file (NULL);
}
2803
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by the front end; lowers all functions, emits
   early debug info, and hands control to symbol_table::compile.  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions (/*first_time=*/true);

  /* Mark alias targets necessary and emit diagnostics.  (Run a second
     time; presumably analysis can add new alias pairs — confirm.)  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions (/*first_time=*/false);

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  if (!seen_error ())
    {
      /* Emit early debug for reachable functions, and by consequence,
         locally scoped symbols.  */
      struct cgraph_node *cnode;
      FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
        (*debug_hooks->early_global_decl) (cnode->decl);

      /* Clean up anything that needs cleaning up after initial debug
         generation.  */
      debuginfo_early_start ();
      (*debug_hooks->early_finish) (main_input_filename);
      debuginfo_early_stop ();
    }

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2868
/* Reset all state within cgraphunit.c so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
cgraphunit_c_finalize (void)
{
  /* No queued nodes may survive a completed compilation.  */
  gcc_assert (cgraph_new_nodes.length () == 0);
  cgraph_new_nodes.truncate (0);

  vtable_entry_type = NULL;
  queued_nodes = &symtab_terminator;

  first_analyzed = NULL;
  first_analyzed_var = NULL;
}
2884
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  The node's body is discarded and replaced by
   a thunk that forwards all arguments to TARGET.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  /* Zeroed thunk info: a plain pass-through with no this/result
     adjustments.  */
  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  create_edge (target, NULL, count);
  callees->can_throw_external = !TREE_NOTHROW (target->decl);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Arguments are forwarded unchanged, so none needs to be
     addressable.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2927
2928 #include "gt-cgraphunit.h"