1 /* Driver of optimization process
2 Copyright (C) 2003-2018 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface between
24 tree-based frontends and the backend.
25
26 The front-end is supposed to use the following functionality:
27
28 - finalize_function
29
30 This function is called once the front-end has parsed the whole body of
31 the function and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
47 This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done, i.e. to discover
81 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across the whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent the whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and/or linktime analysis stage (WPA):
102
103 At linktime the units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decisions of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within a single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage the functions that need to be output into the
139 assembler file are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 the backend.
149 - cgraph_add_new_function is used to add backend-produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of a function into a new one (a version)
157 and applies simple transformations.
158 */
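
/* As a rough illustration (a sketch only, not code from any particular
   frontend; the exact spelling of the calls differs between frontends), a
   frontend typically drives the interface described above like this:

     // for every function whose body has been completely parsed:
     cgraph_node::finalize_function (fndecl, false);

     // for every finalized static or external variable:
     varpool_node::finalize_decl (vardecl);

     // once the whole translation unit has been parsed:
     symtab->finalize_compilation_unit ();

   after which the IPA passes, expansion and output described above are run
   from this file.  */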
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208
209 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
210 secondary queue used during optimization to accommodate passes that
211 may generate new functions that need to be optimized and expanded. */
212 vec<cgraph_node *> cgraph_new_nodes;
213
214 static void expand_all_functions (void);
215 static void mark_functions_to_output (void);
216 static void handle_alias_pairs (void);
217
218 /* Used for vtable lookup in thunk adjusting. */
219 static GTY (()) tree vtable_entry_type;
220
221 /* Return true if this symbol is a function from the C frontend specified
222 directly in RTL form (with "__RTL"). */
223
224 bool
225 symtab_node::native_rtl_p () const
226 {
227 if (TREE_CODE (decl) != FUNCTION_DECL)
228 return false;
229 if (!DECL_STRUCT_FUNCTION (decl))
230 return false;
231 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
232 }
233
234 /* Determine if the symbol declaration is needed. That is, it is visible to
235 something either outside this translation unit or to something magic in
236 the system configury. */
237 bool
238 symtab_node::needed_p (void)
239 {
240 /* Double check that no one output the function into assembly file
241 early. */
242 if (!native_rtl_p ())
243 gcc_checking_assert
244 (!DECL_ASSEMBLER_NAME_SET_P (decl)
245 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
246
247 if (!definition)
248 return false;
249
250 if (DECL_EXTERNAL (decl))
251 return false;
252
253 /* If the user told us it is used, then it must be so. */
254 if (force_output)
255 return true;
256
257 /* ABI forced symbols are needed when they are external. */
258 if (forced_by_abi && TREE_PUBLIC (decl))
259 return true;
260
261 /* Keep constructors, destructors and virtual functions. */
262 if (TREE_CODE (decl) == FUNCTION_DECL
263 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
264 return true;
265
266 /* Externally visible variables must be output. The exception is
267 COMDAT variables that must be output only when they are needed. */
268 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
269 return true;
270
271 return false;
272 }
273
274 /* Head and terminator of the queue of nodes to be processed while building
275 callgraph. */
276
277 static symtab_node symtab_terminator;
278 static symtab_node *queued_nodes = &symtab_terminator;
279
280 /* Add NODE to queue starting at QUEUED_NODES.
281 The queue is linked via AUX pointers and terminated by a pointer to symtab_terminator. */
282
283 static void
284 enqueue_node (symtab_node *node)
285 {
286 if (node->aux)
287 return;
288 gcc_checking_assert (queued_nodes);
289 node->aux = queued_nodes;
290 queued_nodes = node;
291 }
292
293 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
294 functions into the callgraph so that they look like ordinary reachable
295 functions inserted into the callgraph already at construction time. */
296
297 void
298 symbol_table::process_new_functions (void)
299 {
300 tree fndecl;
301
302 if (!cgraph_new_nodes.exists ())
303 return;
304
305 handle_alias_pairs ();
306 /* Note that this queue may grow as it is being processed, as the new
307 functions may generate new ones. */
308 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
309 {
310 cgraph_node *node = cgraph_new_nodes[i];
311 fndecl = node->decl;
312 switch (state)
313 {
314 case CONSTRUCTION:
315 /* At construction time we just need to finalize the function and move
316 it into the reachable functions list. */
317
318 cgraph_node::finalize_function (fndecl, false);
319 call_cgraph_insertion_hooks (node);
320 enqueue_node (node);
321 break;
322
323 case IPA:
324 case IPA_SSA:
325 case IPA_SSA_AFTER_INLINING:
326 /* When IPA optimization has already started, do all essential
327 transformations that have already been performed on the whole
328 cgraph but not on this function. */
329
330 gimple_register_cfg_hooks ();
331 if (!node->analyzed)
332 node->analyze ();
333 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
334 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
335 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
336 {
337 bool summaried_computed = ipa_fn_summaries != NULL;
338 g->get_passes ()->execute_early_local_passes ();
339 /* Early passes compute inline parameters to do inlining
340 and splitting. This is redundant for functions added late.
341 Just throw away whatever it did. */
342 if (!summaried_computed)
343 ipa_free_fn_summary ();
344 }
345 else if (ipa_fn_summaries != NULL)
346 compute_fn_summary (node, true);
347 free_dominance_info (CDI_POST_DOMINATORS);
348 free_dominance_info (CDI_DOMINATORS);
349 pop_cfun ();
350 call_cgraph_insertion_hooks (node);
351 break;
352
353 case EXPANSION:
354 /* Functions created during expansion shall be compiled
355 directly. */
356 node->process = 0;
357 call_cgraph_insertion_hooks (node);
358 node->expand ();
359 break;
360
361 default:
362 gcc_unreachable ();
363 break;
364 }
365 }
366
367 cgraph_new_nodes.release ();
368 }
369
370 /* As a GCC extension we allow redefinition of the function. The
371 semantics when the two bodies differ are not well defined.
372 We replace the old body with the new body, so in unit-at-a-time mode
373 we always use the new body, while in normal mode we may end up with the
374 old body inlined into some functions and the new body expanded and
375 inlined in others.
376
377 ??? It may make more sense to use one body for inlining and the other
378 body for expanding the function, but this is difficult to do. */
379
380 void
381 cgraph_node::reset (void)
382 {
383 /* If process is set, then we have already begun whole-unit analysis.
384 This is *not* testing for whether we've already emitted the function.
385 That case can be sort-of legitimately seen with real function redefinition
386 errors. I would argue that the front end should never present us with
387 such a case, but don't enforce that for now. */
388 gcc_assert (!process);
389
390 /* Reset our data structures so we can analyze the function again. */
391 memset (&local, 0, sizeof (local));
392 memset (&global, 0, sizeof (global));
393 memset (&rtl, 0, sizeof (rtl));
394 analyzed = false;
395 definition = false;
396 alias = false;
397 transparent_alias = false;
398 weakref = false;
399 cpp_implicit_alias = false;
400
401 remove_callees ();
402 remove_all_references ();
403 }
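
/* For illustration only (example source, not code from this file): with
   gnu89 inline semantics the extension described above allows

     extern inline int f (void) { return 1; }   // first, inline body
     int f (void) { return 2; }                 // later, real body

   where the second definition causes finalize_function to call reset ()
   on the node created for the first body and to mark the node
   redefined_extern_inline.  */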
404
405 /* Return true when there are references to the node. INCLUDE_SELF is
406 true if a self reference counts as a reference. */
407
408 bool
409 symtab_node::referred_to_p (bool include_self)
410 {
411 ipa_ref *ref = NULL;
412
413 /* See if there are any references at all. */
414 if (iterate_referring (0, ref))
415 return true;
416 /* For functions check also calls. */
417 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
418 if (cn && cn->callers)
419 {
420 if (include_self)
421 return true;
422 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
423 if (e->caller != this)
424 return true;
425 }
426 return false;
427 }
428
429 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
430 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
431 the garbage collector run at the moment. We would need to either create
432 a new GC context, or just not compile right now. */
433
434 void
435 cgraph_node::finalize_function (tree decl, bool no_collect)
436 {
437 cgraph_node *node = cgraph_node::get_create (decl);
438
439 if (node->definition)
440 {
441 /* Nested functions should only be defined once. */
442 gcc_assert (!DECL_CONTEXT (decl)
443 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
444 node->reset ();
445 node->local.redefined_extern_inline = true;
446 }
447
448 /* Set definition first before calling notice_global_symbol so that
449 it is available to notice_global_symbol. */
450 node->definition = true;
451 notice_global_symbol (decl);
452 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
453 if (!flag_toplevel_reorder)
454 node->no_reorder = true;
455
456 /* With -fkeep-inline-functions we are keeping all inline functions except
457 for extern inline ones. */
458 if (flag_keep_inline_functions
459 && DECL_DECLARED_INLINE_P (decl)
460 && !DECL_EXTERNAL (decl)
461 && !DECL_DISREGARD_INLINE_LIMITS (decl))
462 node->force_output = 1;
463
464 /* __RTL functions were already output as soon as they were parsed (due
465 to the large amount of global state in the backend).
466 Mark such functions as "force_output" to reflect the fact that they
467 will be in the asm file when considering the symbols they reference.
468 The attempt to output them later on will bail out immediately. */
469 if (node->native_rtl_p ())
470 node->force_output = 1;
471
472 /* When not optimizing, also output the static functions (see
473 PR24561), but don't do so for always_inline functions, functions
474 declared inline and nested functions. These were optimized out
475 in the original implementation and it is unclear whether we want
476 to change the behavior here. */
477 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
478 || node->no_reorder)
479 && !node->cpp_implicit_alias
480 && !DECL_DISREGARD_INLINE_LIMITS (decl)
481 && !DECL_DECLARED_INLINE_P (decl)
482 && !(DECL_CONTEXT (decl)
483 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
484 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
485 node->force_output = 1;
486
487 /* If we've not yet emitted decl, tell the debug info about it. */
488 if (!TREE_ASM_WRITTEN (decl))
489 (*debug_hooks->deferred_inline_function) (decl);
490
491 if (!no_collect)
492 ggc_collect ();
493
494 if (symtab->state == CONSTRUCTION
495 && (node->needed_p () || node->referred_to_p ()))
496 enqueue_node (node);
497 }
498
499 /* Add the function FNDECL to the call graph.
500 Unlike finalize_function, this function is intended to be used
501 by the middle end and allows insertion of a new function at an arbitrary
502 point of compilation. The function can be either in high, low or SSA form
503 GIMPLE.
504
505 The function is assumed to be reachable and to have its address taken (so
506 no API-breaking optimizations are performed on it).
507
508 The main work done by this function is to enqueue the function for later
509 processing to avoid the need for the passes to be re-entrant. */
510
511 void
512 cgraph_node::add_new_function (tree fndecl, bool lowered)
513 {
514 gcc::pass_manager *passes = g->get_passes ();
515 cgraph_node *node;
516
517 if (dump_file)
518 {
519 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
520 const char *function_type = ((gimple_has_body_p (fndecl))
521 ? (lowered
522 ? (gimple_in_ssa_p (fn)
523 ? "ssa gimple"
524 : "low gimple")
525 : "high gimple")
526 : "to-be-gimplified");
527 fprintf (dump_file,
528 "Added new %s function %s to callgraph\n",
529 function_type,
530 fndecl_name (fndecl));
531 }
532
533 switch (symtab->state)
534 {
535 case PARSING:
536 cgraph_node::finalize_function (fndecl, false);
537 break;
538 case CONSTRUCTION:
539 /* Just enqueue the function to be processed at the nearest occurrence. */
540 node = cgraph_node::get_create (fndecl);
541 if (lowered)
542 node->lowered = true;
543 cgraph_new_nodes.safe_push (node);
544 break;
545
546 case IPA:
547 case IPA_SSA:
548 case IPA_SSA_AFTER_INLINING:
549 case EXPANSION:
550 /* Bring the function into a finalized state and enqueue it for later
551 analysis and compilation. */
552 node = cgraph_node::get_create (fndecl);
553 node->local.local = false;
554 node->definition = true;
555 node->force_output = true;
556 if (TREE_PUBLIC (fndecl))
557 node->externally_visible = true;
558 if (!lowered && symtab->state == EXPANSION)
559 {
560 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
561 gimple_register_cfg_hooks ();
562 bitmap_obstack_initialize (NULL);
563 execute_pass_list (cfun, passes->all_lowering_passes);
564 passes->execute_early_local_passes ();
565 bitmap_obstack_release (NULL);
566 pop_cfun ();
567
568 lowered = true;
569 }
570 if (lowered)
571 node->lowered = true;
572 cgraph_new_nodes.safe_push (node);
573 break;
574
575 case FINISHED:
576 /* At the very end of compilation we have to do all the work up
577 to expansion. */
578 node = cgraph_node::create (fndecl);
579 if (lowered)
580 node->lowered = true;
581 node->definition = true;
582 node->analyze ();
583 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
584 gimple_register_cfg_hooks ();
585 bitmap_obstack_initialize (NULL);
586 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
587 g->get_passes ()->execute_early_local_passes ();
588 bitmap_obstack_release (NULL);
589 pop_cfun ();
590 node->expand ();
591 break;
592
593 default:
594 gcc_unreachable ();
595 }
596
597 /* Set a personality if required and we already passed EH lowering. */
598 if (lowered
599 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
600 == eh_personality_lang))
601 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
602 }
603
604 /* Analyze the function scheduled to be output. */
605 void
606 cgraph_node::analyze (void)
607 {
608 if (native_rtl_p ())
609 {
610 analyzed = true;
611 return;
612 }
613
614 tree decl = this->decl;
615 location_t saved_loc = input_location;
616 input_location = DECL_SOURCE_LOCATION (decl);
617
618 if (thunk.thunk_p)
619 {
620 cgraph_node *t = cgraph_node::get (thunk.alias);
621
622 create_edge (t, NULL, t->count);
623 callees->can_throw_external = !TREE_NOTHROW (t->decl);
624 /* Target code in expand_thunk may need the thunk's target
625 to be analyzed, so recurse here. */
626 if (!t->analyzed && t->definition)
627 t->analyze ();
628 if (t->alias)
629 {
630 t = t->get_alias_target ();
631 if (!t->analyzed && t->definition)
632 t->analyze ();
633 }
634 bool ret = expand_thunk (false, false);
635 thunk.alias = NULL;
636 if (!ret)
637 return;
638 }
639 if (alias)
640 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
641 else if (dispatcher_function)
642 {
643 /* Generate the dispatcher body of multi-versioned functions. */
644 cgraph_function_version_info *dispatcher_version_info
645 = function_version ();
646 if (dispatcher_version_info != NULL
647 && (dispatcher_version_info->dispatcher_resolver
648 == NULL_TREE))
649 {
650 tree resolver = NULL_TREE;
651 gcc_assert (targetm.generate_version_dispatcher_body);
652 resolver = targetm.generate_version_dispatcher_body (this);
653 gcc_assert (resolver != NULL_TREE);
654 }
655 }
656 else
657 {
658 push_cfun (DECL_STRUCT_FUNCTION (decl));
659
660 assign_assembler_name_if_needed (decl);
661
662 /* Make sure to gimplify bodies only once. While analyzing a
663 function we lower it, which will require gimplified nested
664 functions, so we can end up here with an already gimplified
665 body. */
666 if (!gimple_has_body_p (decl))
667 gimplify_function_tree (decl);
668
669 /* Lower the function. */
670 if (!lowered)
671 {
672 if (nested)
673 lower_nested_functions (decl);
674 gcc_assert (!nested);
675
676 gimple_register_cfg_hooks ();
677 bitmap_obstack_initialize (NULL);
678 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
679 free_dominance_info (CDI_POST_DOMINATORS);
680 free_dominance_info (CDI_DOMINATORS);
681 compact_blocks ();
682 bitmap_obstack_release (NULL);
683 lowered = true;
684 }
685
686 pop_cfun ();
687 }
688 analyzed = true;
689
690 input_location = saved_loc;
691 }
692
693 /* The C++ frontend produces same-body aliases all over the place, even
694 before the PCH gets streamed out. It relies on us linking the aliases
695 with their function in order to do the fixups, but ipa-ref is not PCH
696 safe. Consequently we first produce aliases without links, but once the
697 C++ FE is sure it won't stream a PCH we build the links via this function. */
698
699 void
700 symbol_table::process_same_body_aliases (void)
701 {
702 symtab_node *node;
703 FOR_EACH_SYMBOL (node)
704 if (node->cpp_implicit_alias && !node->analyzed)
705 node->resolve_alias
706 (VAR_P (node->alias_target)
707 ? (symtab_node *)varpool_node::get_create (node->alias_target)
708 : (symtab_node *)cgraph_node::get_create (node->alias_target));
709 cpp_implicit_aliases_done = true;
710 }
711
712 /* Process attributes common to vars and functions. */
713
714 static void
715 process_common_attributes (symtab_node *node, tree decl)
716 {
717 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
718
719 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
720 {
721 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
722 "%<weakref%> attribute should be accompanied with"
723 " an %<alias%> attribute");
724 DECL_WEAK (decl) = 0;
725 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
726 DECL_ATTRIBUTES (decl));
727 }
728
729 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
730 node->no_reorder = 1;
731 }
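
/* An illustrative example (not part of this file) of the weakref check
   above:

     static int a __attribute__ ((weakref));              // warned, weakref dropped
     static int b __attribute__ ((weakref ("real_b")));   // OK, implies an alias

   The first declaration carries no alias target, so the warning fires and
   the weakref attribute is removed as done above; real_b is a hypothetical
   target symbol.  */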
732
733 /* Look for externally_visible and used attributes and mark cgraph nodes
734 accordingly.
735
736 We cannot mark the nodes at the point the attributes are processed (in
737 handle_*_attribute) because the copy of the declarations available at that
738 point may not be canonical. For example, in:
739
740 void f();
741 void f() __attribute__((used));
742
743 the declaration we see in handle_used_attribute will be the second
744 declaration -- but the front end will subsequently merge that declaration
745 with the original declaration and discard the second declaration.
746
747 Furthermore, we can't mark these nodes in finalize_function because:
748
749 void f() {}
750 void f() __attribute__((externally_visible));
751
752 is valid.
753
754 So, we walk the nodes at the end of the translation unit, applying the
755 attributes at that point. */
756
757 static void
758 process_function_and_variable_attributes (cgraph_node *first,
759 varpool_node *first_var)
760 {
761 cgraph_node *node;
762 varpool_node *vnode;
763
764 for (node = symtab->first_function (); node != first;
765 node = symtab->next_function (node))
766 {
767 tree decl = node->decl;
768 if (DECL_PRESERVE_P (decl))
769 node->mark_force_output ();
770 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
771 {
772 if (! TREE_PUBLIC (node->decl))
773 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
774 "%<externally_visible%>"
775 " attribute has effect only on public objects");
776 }
777 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
778 && (node->definition && !node->alias))
779 {
780 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
781 "%<weakref%> attribute ignored"
782 " because function is defined");
783 DECL_WEAK (decl) = 0;
784 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
785 DECL_ATTRIBUTES (decl));
786 }
787 else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
788 && node->definition
789 && !node->alias)
790 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
791 "%<alias%> attribute ignored"
792 " because function is defined");
793
794 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
795 && !DECL_DECLARED_INLINE_P (decl)
796 /* Redefining an extern inline function makes it DECL_UNINLINABLE. */
797 && !DECL_UNINLINABLE (decl))
798 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
799 "always_inline function might not be inlinable");
800
801 process_common_attributes (node, decl);
802 }
803 for (vnode = symtab->first_variable (); vnode != first_var;
804 vnode = symtab->next_variable (vnode))
805 {
806 tree decl = vnode->decl;
807 if (DECL_EXTERNAL (decl)
808 && DECL_INITIAL (decl))
809 varpool_node::finalize_decl (decl);
810 if (DECL_PRESERVE_P (decl))
811 vnode->force_output = true;
812 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
813 {
814 if (! TREE_PUBLIC (vnode->decl))
815 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
816 "%<externally_visible%>"
817 " attribute has effect only on public objects");
818 }
819 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
820 && vnode->definition
821 && DECL_INITIAL (decl))
822 {
823 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
824 "%<weakref%> attribute ignored"
825 " because variable is initialized");
826 DECL_WEAK (decl) = 0;
827 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
828 DECL_ATTRIBUTES (decl));
829 }
830 process_common_attributes (vnode, decl);
831 }
832 }
833
834 /* Mark DECL as finalized. By finalizing the declaration, the frontend
835 instructs the middle end to output the variable to the asm file if it is
836 needed or externally visible. */
837
838 void
839 varpool_node::finalize_decl (tree decl)
840 {
841 varpool_node *node = varpool_node::get_create (decl);
842
843 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
844
845 if (node->definition)
846 return;
847 /* Set definition first before calling notice_global_symbol so that
848 it is available to notice_global_symbol. */
849 node->definition = true;
850 notice_global_symbol (decl);
851 if (!flag_toplevel_reorder)
852 node->no_reorder = true;
853 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
854 /* Traditionally we do not eliminate static variables when not
855 optimizing and when not doing toplevel reorder. */
856 || (node->no_reorder && !DECL_COMDAT (node->decl)
857 && !DECL_ARTIFICIAL (node->decl)))
858 node->force_output = true;
859
860 if (symtab->state == CONSTRUCTION
861 && (node->needed_p () || node->referred_to_p ()))
862 enqueue_node (node);
863 if (symtab->state >= IPA_SSA)
864 node->analyze ();
865 /* Some frontends produce various interface variables after compilation
866 has finished. */
867 if (symtab->state == FINISHED
868 || (node->no_reorder
869 && symtab->state == EXPANSION))
870 node->assemble_decl ();
871 }
872
873 /* EDGE is a polymorphic call. Mark all possible targets as reachable
874 and if there is only one target, perform trivial devirtualization.
875 REACHABLE_CALL_TARGETS collects target lists we already walked to
876 avoid duplicate work. */
877
878 static void
879 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
880 cgraph_edge *edge)
881 {
882 unsigned int i;
883 void *cache_token;
884 bool final;
885 vec <cgraph_node *>targets
886 = possible_polymorphic_call_targets
887 (edge, &final, &cache_token);
888
889 if (!reachable_call_targets->add (cache_token))
890 {
891 if (symtab->dump_file)
892 dump_possible_polymorphic_call_targets
893 (symtab->dump_file, edge);
894
895 for (i = 0; i < targets.length (); i++)
896 {
897 /* Do not bother to mark virtual methods in an anonymous namespace;
898 either we will find a use of the virtual table defining it, or it is
899 unused. */
900 if (targets[i]->definition
901 && TREE_CODE
902 (TREE_TYPE (targets[i]->decl))
903 == METHOD_TYPE
904 && !type_in_anonymous_namespace_p
905 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
906 enqueue_node (targets[i]);
907 }
908 }
909
910 /* Very trivial devirtualization; when the type is
911 final or anonymous (so we know all of its derived types)
912 and there is only one possible virtual call target,
913 make the edge direct. */
914 if (final)
915 {
916 if (targets.length () <= 1 && dbg_cnt (devirt))
917 {
918 cgraph_node *target;
919 if (targets.length () == 1)
920 target = targets[0];
921 else
922 target = cgraph_node::create
923 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
924
925 if (symtab->dump_file)
926 {
927 fprintf (symtab->dump_file,
928 "Devirtualizing call: ");
929 print_gimple_stmt (symtab->dump_file,
930 edge->call_stmt, 0,
931 TDF_SLIM);
932 }
933 if (dump_enabled_p ())
934 {
935 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
936 "devirtualizing call in %s to %s\n",
937 edge->caller->name (), target->name ());
938 }
939
940 edge->make_direct (target);
941 edge->redirect_call_stmt_to_callee ();
942
943 if (symtab->dump_file)
944 {
945 fprintf (symtab->dump_file,
946 "Devirtualized as: ");
947 print_gimple_stmt (symtab->dump_file,
948 edge->call_stmt, 0,
949 TDF_SLIM);
950 }
951 }
952 }
953 }
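
/* As an illustration (C++ example source, not code from this file), given

     struct S final { virtual int f () { return 0; } };
     int call (S *p) { return p->f (); }

   the type S is final, so with -fdevirtualize the indirect call above has a
   single possible target and walk_polymorphic_call_targets turns the edge
   into a direct call to S::f.  */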
954
955 /* Issue appropriate warnings for the global declaration SNODE. */
956
957 static void
958 check_global_declaration (symtab_node *snode)
959 {
960 const char *decl_file;
961 tree decl = snode->decl;
962
963 /* Warn about any function declared static but not defined. We don't
964 warn about variables, because many programs have static variables
965 that exist only to get some text into the object file. */
966 if (TREE_CODE (decl) == FUNCTION_DECL
967 && DECL_INITIAL (decl) == 0
968 && DECL_EXTERNAL (decl)
969 && ! DECL_ARTIFICIAL (decl)
970 && ! TREE_NO_WARNING (decl)
971 && ! TREE_PUBLIC (decl)
972 && (warn_unused_function
973 || snode->referred_to_p (/*include_self=*/false)))
974 {
975 if (snode->referred_to_p (/*include_self=*/false))
976 pedwarn (input_location, 0, "%q+F used but never defined", decl);
977 else
978 warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
979 /* This symbol is effectively an "extern" declaration now. */
980 TREE_PUBLIC (decl) = 1;
981 }
982
983 /* Warn about static fns or vars defined but not used. */
984 if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
985 || (((warn_unused_variable && ! TREE_READONLY (decl))
986 || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
987 && (warn_unused_const_variable == 2
988 || (main_input_filename != NULL
989 && (decl_file = DECL_SOURCE_FILE (decl)) != NULL
990 && filename_cmp (main_input_filename,
991 decl_file) == 0))))
992 && VAR_P (decl)))
993 && ! DECL_IN_SYSTEM_HEADER (decl)
994 && ! snode->referred_to_p (/*include_self=*/false)
995 /* This TREE_USED check is needed in addition to referred_to_p
996 above, because the `__unused__' attribute is not being
997 considered for referred_to_p. */
998 && ! TREE_USED (decl)
999 /* The TREE_USED bit for file-scope decls is kept in the identifier,
1000 to handle multiple external decls in different scopes. */
1001 && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1002 && ! DECL_EXTERNAL (decl)
1003 && ! DECL_ARTIFICIAL (decl)
1004 && ! DECL_ABSTRACT_ORIGIN (decl)
1005 && ! TREE_PUBLIC (decl)
1006 /* A volatile variable might be used in some non-obvious way. */
1007 && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1008 /* Global register variables must be declared to reserve them. */
1009 && ! (VAR_P (decl) && DECL_REGISTER (decl))
1010 /* Global ctors and dtors are called by the runtime. */
1011 && (TREE_CODE (decl) != FUNCTION_DECL
1012 || (!DECL_STATIC_CONSTRUCTOR (decl)
1013 && !DECL_STATIC_DESTRUCTOR (decl)))
1014 /* Otherwise, ask the language. */
1015 && lang_hooks.decls.warn_unused_global (decl))
1016 warning_at (DECL_SOURCE_LOCATION (decl),
1017 (TREE_CODE (decl) == FUNCTION_DECL)
1018 ? OPT_Wunused_function
1019 : (TREE_READONLY (decl)
1020 ? OPT_Wunused_const_variable_
1021 : OPT_Wunused_variable),
1022 "%qD defined but not used", decl);
1023 }
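
/* Illustrative examples (not part of this file) of what the checks above
   diagnose at the end of a translation unit, with -Wunused-function and
   -Wunused-variable enabled:

     static void f (void);       // if referenced: "used but never defined"
                                 // otherwise: "declared 'static' but never defined"
     static int unused = 42;     // never referenced: "defined but not used"
*/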
1024
1025 /* Discover all functions and variables that are trivially needed, analyze
1026 them as well as all functions and variables referred to by them. */
1027 static cgraph_node *first_analyzed;
1028 static varpool_node *first_analyzed_var;
1029
1030 /* FIRST_TIME is set to TRUE the first time we are called for a
1031 translation unit from finalize_compilation_unit() or false
1032 otherwise. */
1033
1034 static void
1035 analyze_functions (bool first_time)
1036 {
1037 /* Keep track of already processed nodes when called multiple times for
1038 intermodule optimization. */
1039 cgraph_node *first_handled = first_analyzed;
1040 varpool_node *first_handled_var = first_analyzed_var;
1041 hash_set<void *> reachable_call_targets;
1042
1043 symtab_node *node;
1044 symtab_node *next;
1045 int i;
1046 ipa_ref *ref;
1047 bool changed = true;
1048 location_t saved_loc = input_location;
1049
1050 bitmap_obstack_initialize (NULL);
1051 symtab->state = CONSTRUCTION;
1052 input_location = UNKNOWN_LOCATION;
1053
1054 /* Ugly, but the fixup cannot happen at the time the same-body alias is
1055 created; the C++ FE is confused about the COMDAT groups being right. */
1056 if (symtab->cpp_implicit_aliases_done)
1057 FOR_EACH_SYMBOL (node)
1058 if (node->cpp_implicit_alias)
1059 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1060 build_type_inheritance_graph ();
1061
1062 /* Analysis adds static variables that in turn add references to new
1063 functions. So we need to iterate the process until it stabilizes. */
1064 while (changed)
1065 {
1066 changed = false;
1067 process_function_and_variable_attributes (first_analyzed,
1068 first_analyzed_var);
1069
1070 /* First identify the trivially needed symbols. */
1071 for (node = symtab->first_symbol ();
1072 node != first_analyzed
1073 && node != first_analyzed_var; node = node->next)
1074 {
1075 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1076 node->get_comdat_group_id ();
1077 if (node->needed_p ())
1078 {
1079 enqueue_node (node);
1080 if (!changed && symtab->dump_file)
1081 fprintf (symtab->dump_file, "Trivially needed symbols:");
1082 changed = true;
1083 if (symtab->dump_file)
1084 fprintf (symtab->dump_file, " %s", node->asm_name ());
1085 if (!changed && symtab->dump_file)
1086 fprintf (symtab->dump_file, "\n");
1087 }
1088 if (node == first_analyzed
1089 || node == first_analyzed_var)
1090 break;
1091 }
1092 symtab->process_new_functions ();
1093 first_analyzed_var = symtab->first_variable ();
1094 first_analyzed = symtab->first_function ();
1095
1096 if (changed && symtab->dump_file)
1097 fprintf (symtab->dump_file, "\n");
1098
1099 /* Lower representation, build callgraph edges and references for all trivially
1100 needed symbols and all symbols referred to by them. */
1101 while (queued_nodes != &symtab_terminator)
1102 {
1103 changed = true;
1104 node = queued_nodes;
1105 queued_nodes = (symtab_node *)queued_nodes->aux;
1106 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1107 if (cnode && cnode->definition)
1108 {
1109 cgraph_edge *edge;
1110 tree decl = cnode->decl;
1111
1112 /* ??? It is possible to create an extern inline function
1113 and later use the weak alias attribute to kill its body.
1114 See gcc.c-torture/compile/20011119-1.c. */
1115 if (!DECL_STRUCT_FUNCTION (decl)
1116 && !cnode->alias
1117 && !cnode->thunk.thunk_p
1118 && !cnode->dispatcher_function)
1119 {
1120 cnode->reset ();
1121 cnode->local.redefined_extern_inline = true;
1122 continue;
1123 }
1124
1125 if (!cnode->analyzed)
1126 cnode->analyze ();
1127
1128 for (edge = cnode->callees; edge; edge = edge->next_callee)
1129 if (edge->callee->definition
1130 && (!DECL_EXTERNAL (edge->callee->decl)
1131 /* When not optimizing, do not try to analyze extern
1132 inline functions. Doing so is pointless. */
1133 || opt_for_fn (edge->callee->decl, optimize)
1134 /* Weakrefs need to be preserved. */
1135 || edge->callee->alias
1136 /* always_inline functions are inlined even at -O0. */
1137 || lookup_attribute
1138 ("always_inline",
1139 DECL_ATTRIBUTES (edge->callee->decl))
1140 /* Multiversioned functions need the dispatcher to
1141 be produced locally even for extern functions. */
1142 || edge->callee->function_version ()))
1143 enqueue_node (edge->callee);
1144 if (opt_for_fn (cnode->decl, optimize)
1145 && opt_for_fn (cnode->decl, flag_devirtualize))
1146 {
1147 cgraph_edge *next;
1148
1149 for (edge = cnode->indirect_calls; edge; edge = next)
1150 {
1151 next = edge->next_callee;
1152 if (edge->indirect_info->polymorphic)
1153 walk_polymorphic_call_targets (&reachable_call_targets,
1154 edge);
1155 }
1156 }
1157
1158 /* If decl is a clone of an abstract function,
1159 mark that abstract function so that we don't release its body.
1160 The DECL_INITIAL() of that abstract function declaration
1161 will later be needed to output debug info. */
1162 if (DECL_ABSTRACT_ORIGIN (decl))
1163 {
1164 cgraph_node *origin_node
1165 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1166 origin_node->used_as_abstract_origin = true;
1167 }
1168 /* Preserve a function's function context node. It will
1169 later be needed to output debug info. */
1170 if (tree fn = decl_function_context (decl))
1171 {
1172 cgraph_node *origin_node = cgraph_node::get_create (fn);
1173 enqueue_node (origin_node);
1174 }
1175 }
1176 else
1177 {
1178 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1179 if (vnode && vnode->definition && !vnode->analyzed)
1180 vnode->analyze ();
1181 }
1182
1183 if (node->same_comdat_group)
1184 {
1185 symtab_node *next;
1186 for (next = node->same_comdat_group;
1187 next != node;
1188 next = next->same_comdat_group)
1189 if (!next->comdat_local_p ())
1190 enqueue_node (next);
1191 }
1192 for (i = 0; node->iterate_reference (i, ref); i++)
1193 if (ref->referred->definition
1194 && (!DECL_EXTERNAL (ref->referred->decl)
1195 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1196 && optimize)
1197 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1198 && opt_for_fn (ref->referred->decl, optimize))
1199 || node->alias
1200 || ref->referred->alias)))
1201 enqueue_node (ref->referred);
1202 symtab->process_new_functions ();
1203 }
1204 }
1205 update_type_inheritance_graph ();
1206
1207 /* Collect entry points to the unit. */
1208 if (symtab->dump_file)
1209 {
1210 fprintf (symtab->dump_file, "\n\nInitial ");
1211 symtab->dump (symtab->dump_file);
1212 }
1213
1214 if (first_time)
1215 {
1216 symtab_node *snode;
1217 FOR_EACH_SYMBOL (snode)
1218 check_global_declaration (snode);
1219 }
1220
1221 if (symtab->dump_file)
1222 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1223
1224 for (node = symtab->first_symbol ();
1225 node != first_handled
1226 && node != first_handled_var; node = next)
1227 {
1228 next = node->next;
1229 if (!node->aux && !node->referred_to_p ())
1230 {
1231 if (symtab->dump_file)
1232 fprintf (symtab->dump_file, " %s", node->name ());
1233
1234 /* See if the debugger can use anything before the DECL
1235 passes away. Perhaps it can notice a DECL that is now a
1236 constant and can tag the early DIE with an appropriate
1237 attribute.
1238
1239 Otherwise, this is the last chance the debug_hooks have
1240 at looking at optimized away DECLs, since
1241 late_global_decl will subsequently be called from the
1242 contents of the now pruned symbol table. */
1243 if (VAR_P (node->decl)
1244 && !decl_function_context (node->decl))
1245 {
1246 /* We are reclaiming totally unreachable code and variables
1247 so they effectively appear as readonly. Show that to
1248 the debug machinery. */
1249 TREE_READONLY (node->decl) = 1;
1250 node->definition = false;
1251 (*debug_hooks->late_global_decl) (node->decl);
1252 }
1253
1254 node->remove ();
1255 continue;
1256 }
1257 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1258 {
1259 tree decl = node->decl;
1260
1261 if (cnode->definition && !gimple_has_body_p (decl)
1262 && !cnode->alias
1263 && !cnode->thunk.thunk_p)
1264 cnode->reset ();
1265
1266 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1267 || cnode->alias
1268 || gimple_has_body_p (decl)
1269 || cnode->native_rtl_p ());
1270 gcc_assert (cnode->analyzed == cnode->definition);
1271 }
1272 node->aux = NULL;
1273 }
1274 for (;node; node = node->next)
1275 node->aux = NULL;
1276 first_analyzed = symtab->first_function ();
1277 first_analyzed_var = symtab->first_variable ();
1278 if (symtab->dump_file)
1279 {
1280 fprintf (symtab->dump_file, "\n\nReclaimed ");
1281 symtab->dump (symtab->dump_file);
1282 }
1283 bitmap_obstack_release (NULL);
1284 ggc_collect ();
1285 /* Initialize the assembler name hash; in particular we want to trigger C++
1286 mangling and same-body alias creation before we free the DECL_ARGUMENTS
1287 used by it. */
1288 if (!seen_error ())
1289 symtab->symtab_initialize_asm_name_hash ();
1290
1291 input_location = saved_loc;
1292 }
1293
1294 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1295 (which may be an ifunc resolver) and issue a diagnostic when they are
1296 not compatible according to language rules (plus a C++ extension for
1297 non-static member functions). */
1298
1299 static void
1300 maybe_diag_incompatible_alias (tree alias, tree target)
1301 {
1302 tree altype = TREE_TYPE (alias);
1303 tree targtype = TREE_TYPE (target);
1304
1305 bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1306 tree funcptr = altype;
1307
1308 if (ifunc)
1309 {
1310 /* Handle attribute ifunc first. */
1311 if (TREE_CODE (altype) == METHOD_TYPE)
1312 {
1313 /* Set FUNCPTR to the type of the alias target. If the type
1314 is a non-static member function of class C, construct a type
1315 of an ordinary function taking C* as the first argument,
1316 followed by the member function argument list, and use it
1317 instead to check for incompatibility. This conversion is
1318 not defined by the language but an extension provided by
1319 G++. */
1320
1321 tree rettype = TREE_TYPE (altype);
1322 tree args = TYPE_ARG_TYPES (altype);
1323 altype = build_function_type (rettype, args);
1324 funcptr = altype;
1325 }
1326
1327 targtype = TREE_TYPE (targtype);
1328
1329 if (POINTER_TYPE_P (targtype))
1330 {
1331 targtype = TREE_TYPE (targtype);
1332
1333 /* Only issue Wattribute-alias for conversions to void* with
1334 -Wextra. */
1335 if (VOID_TYPE_P (targtype) && !extra_warnings)
1336 return;
1337
1338 /* Proceed to handle incompatible ifunc resolvers below. */
1339 }
1340 else
1341 {
1342 funcptr = build_pointer_type (funcptr);
1343
1344 error_at (DECL_SOURCE_LOCATION (target),
1345 "%<ifunc%> resolver for %qD must return %qT",
1346 alias, funcptr);
1347 inform (DECL_SOURCE_LOCATION (alias),
1348 "resolver indirect function declared here");
1349 return;
1350 }
1351 }
1352
1353 if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1354 || (prototype_p (altype)
1355 && prototype_p (targtype)
1356 && !types_compatible_p (altype, targtype))))
1357 {
1358 /* Warn for incompatibilities. Avoid warning for functions
1359 without a prototype to make it possible to declare aliases
1360 without knowing the exact type, as libstdc++ does. */
1361 if (ifunc)
1362 {
1363 funcptr = build_pointer_type (funcptr);
1364
1365 auto_diagnostic_group d;
1366 if (warning_at (DECL_SOURCE_LOCATION (target),
1367 OPT_Wattribute_alias,
1368 "%<ifunc%> resolver for %qD should return %qT",
1369 alias, funcptr))
1370 inform (DECL_SOURCE_LOCATION (alias),
1371 "resolver indirect function declared here");
1372 }
1373 else
1374 {
1375 auto_diagnostic_group d;
1376 if (warning_at (DECL_SOURCE_LOCATION (alias),
1377 OPT_Wattribute_alias,
1378 "%qD alias between functions of incompatible "
1379 "types %qT and %qT", alias, altype, targtype))
1380 inform (DECL_SOURCE_LOCATION (target),
1381 "aliased declaration here");
1382 }
1383 }
1384 }
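
/* For reference, a hypothetical ifunc use that passes the checks above
   without diagnostics:

     static int impl (void) { return 0; }
     static int (*my_resolver (void)) (void) { return impl; }
     int dispatched (void) __attribute__ ((ifunc ("my_resolver")));

   A resolver whose return type is not a pointer is rejected with the
   "must return" error above, while a resolver returning an incompatible
   function pointer type only triggers the -Wattribute-alias warning.  */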
1385
1386 /* Translate the ugly representation of aliases as alias pairs into a nice
1387 representation in the callgraph. We don't handle all cases yet,
1388 unfortunately. */
1389
1390 static void
1391 handle_alias_pairs (void)
1392 {
1393 alias_pair *p;
1394 unsigned i;
1395
1396 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1397 {
1398 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1399
1400 /* Weakrefs with a target not defined in the current unit are easy to handle:
1401 they behave just like external variables except that we need to note the
1402 alias flag to later output the weakref pseudo-op into the asm file. */
1403 if (!target_node
1404 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1405 {
1406 symtab_node *node = symtab_node::get (p->decl);
1407 if (node)
1408 {
1409 node->alias_target = p->target;
1410 node->weakref = true;
1411 node->alias = true;
1412 node->transparent_alias = true;
1413 }
1414 alias_pairs->unordered_remove (i);
1415 continue;
1416 }
1417 else if (!target_node)
1418 {
1419 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1420 symtab_node *node = symtab_node::get (p->decl);
1421 if (node)
1422 node->alias = false;
1423 alias_pairs->unordered_remove (i);
1424 continue;
1425 }
1426
1427 if (DECL_EXTERNAL (target_node->decl)
1428 /* We use local aliases for C++ thunks to force the tailcall
1429 to bind locally. This is a hack - to keep it working do
1430 the following (which is not strictly correct). */
1431 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1432 || ! DECL_VIRTUAL_P (target_node->decl))
1433 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1434 {
1435 error ("%q+D aliased to external symbol %qE",
1436 p->decl, p->target);
1437 }
1438
1439 if (TREE_CODE (p->decl) == FUNCTION_DECL
1440 && target_node && is_a <cgraph_node *> (target_node))
1441 {
1442 maybe_diag_incompatible_alias (p->decl, target_node->decl);
1443
1444 cgraph_node *src_node = cgraph_node::get (p->decl);
1445 if (src_node && src_node->definition)
1446 src_node->reset ();
1447 cgraph_node::create_alias (p->decl, target_node->decl);
1448 alias_pairs->unordered_remove (i);
1449 }
1450 else if (VAR_P (p->decl)
1451 && target_node && is_a <varpool_node *> (target_node))
1452 {
1453 varpool_node::create_alias (p->decl, target_node->decl);
1454 alias_pairs->unordered_remove (i);
1455 }
1456 else
1457 {
1458 error ("%q+D alias between function and variable is not supported",
1459 p->decl);
1460 inform (DECL_SOURCE_LOCATION (target_node->decl),
1461 "aliased declaration here");
1462
1463 alias_pairs->unordered_remove (i);
1464 }
1465 }
1466 vec_free (alias_pairs);
1467 }
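
/* The alias pairs processed above originate from declarations such as
   (an illustrative fragment, impl_fn being a stand-in name):

     void impl_fn (void) { }
     void alias_fn (void) __attribute__ ((alias ("impl_fn")));

   The target must be defined in the current translation unit; otherwise the
   "aliased to undefined symbol" error above is emitted, with weakrefs being
   the exception handled first.  */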
1468
1469
1470 /* Figure out which functions we want to assemble. */
1471
1472 static void
1473 mark_functions_to_output (void)
1474 {
1475 bool check_same_comdat_groups = false;
1476 cgraph_node *node;
1477
1478 if (flag_checking)
1479 FOR_EACH_FUNCTION (node)
1480 gcc_assert (!node->process);
1481
1482 FOR_EACH_FUNCTION (node)
1483 {
1484 tree decl = node->decl;
1485
1486 gcc_assert (!node->process || node->same_comdat_group);
1487 if (node->process)
1488 continue;
1489
1490 /* We need to output all local functions that are used and not
1491 always inlined, as well as those that are reachable from
1492 outside the current compilation unit. */
1493 if (node->analyzed
1494 && !node->thunk.thunk_p
1495 && !node->alias
1496 && !node->global.inlined_to
1497 && !TREE_ASM_WRITTEN (decl)
1498 && !DECL_EXTERNAL (decl))
1499 {
1500 node->process = 1;
1501 if (node->same_comdat_group)
1502 {
1503 cgraph_node *next;
1504 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1505 next != node;
1506 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1507 if (!next->thunk.thunk_p && !next->alias
1508 && !next->comdat_local_p ())
1509 next->process = 1;
1510 }
1511 }
1512 else if (node->same_comdat_group)
1513 {
1514 if (flag_checking)
1515 check_same_comdat_groups = true;
1516 }
1517 else
1518 {
1519 /* We should've reclaimed all functions that are not needed. */
1520 if (flag_checking
1521 && !node->global.inlined_to
1522 && gimple_has_body_p (decl)
1523 /* FIXME: in an ltrans unit when the offline copy is outside a partition
1524 but inline copies are inside a partition, we can end up not removing the
1525 body since we no longer have an analyzed node pointing to it. */
1526 && !node->in_other_partition
1527 && !node->alias
1528 && !node->clones
1529 && !DECL_EXTERNAL (decl))
1530 {
1531 node->debug ();
1532 internal_error ("failed to reclaim unneeded function");
1533 }
1534 gcc_assert (node->global.inlined_to
1535 || !gimple_has_body_p (decl)
1536 || node->in_other_partition
1537 || node->clones
1538 || DECL_ARTIFICIAL (decl)
1539 || DECL_EXTERNAL (decl));
1540
1541 }
1542
1543 }
1544 if (flag_checking && check_same_comdat_groups)
1545 FOR_EACH_FUNCTION (node)
1546 if (node->same_comdat_group && !node->process)
1547 {
1548 tree decl = node->decl;
1549 if (!node->global.inlined_to
1550 && gimple_has_body_p (decl)
1551 /* FIXME: in an ltrans unit when the offline copy is outside a
1552 partition but inline copies are inside a partition, we can
1553 end up not removing the body since we no longer have an
1554 analyzed node pointing to it. */
1555 && !node->in_other_partition
1556 && !node->clones
1557 && !DECL_EXTERNAL (decl))
1558 {
1559 node->debug ();
1560 internal_error ("failed to reclaim unneeded function in same "
1561 "comdat group");
1562 }
1563 }
1564 }
1565
1566 /* DECL is a FUNCTION_DECL. Initialize data structures so that DECL is a
1567 function in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1568
1569 Set current_function_decl and cfun to the newly constructed empty function
1570 body. Return the basic block in the function body. */
1571
1572 basic_block
1573 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1574 {
1575 basic_block bb;
1576 edge e;
1577
1578 current_function_decl = decl;
1579 allocate_struct_function (decl, false);
1580 gimple_register_cfg_hooks ();
1581 init_empty_tree_cfg ();
1582 init_tree_ssa (cfun);
1583
1584 if (in_ssa)
1585 {
1586 init_ssa_operands (cfun);
1587 cfun->gimple_df->in_ssa_p = true;
1588 cfun->curr_properties |= PROP_ssa;
1589 }
1590
1591 DECL_INITIAL (decl) = make_node (BLOCK);
1592 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1593
1594 DECL_SAVED_TREE (decl) = error_mark_node;
1595 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1596 | PROP_cfg | PROP_loops);
1597
1598 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1599 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1600 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1601
1602 /* Create a BB for the body of the function and connect it properly. */
1603 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1604 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1605 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1606 bb->count = count;
1607 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1608 e->probability = profile_probability::always ();
1609 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1610 e->probability = profile_probability::always ();
1611 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1612
1613 return bb;
1614 }
1615
1616 /* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
1617 VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
1618 it is non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1619 zero for a result-adjusting thunk. */
1620
1621 tree
1622 thunk_adjust (gimple_stmt_iterator * bsi,
1623 tree ptr, bool this_adjusting,
1624 HOST_WIDE_INT fixed_offset, tree virtual_offset,
1625 HOST_WIDE_INT indirect_offset)
1626 {
1627 gassign *stmt;
1628 tree ret;
1629
1630 if (this_adjusting
1631 && fixed_offset != 0)
1632 {
1633 stmt = gimple_build_assign
1634 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1635 ptr,
1636 fixed_offset));
1637 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1638 }
1639
1640 if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
1641 {
1642 tree vfunc_type = make_node (FUNCTION_TYPE);
1643 TREE_TYPE (vfunc_type) = integer_type_node;
1644 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1645 layout_type (vfunc_type);
1646
1647 vtable_entry_type = build_pointer_type (vfunc_type);
1648 }
1649
1650 /* If there's a virtual offset, look up that value in the vtable and
1651 adjust the pointer again. */
1652 if (virtual_offset)
1653 {
1654 tree vtabletmp;
1655 tree vtabletmp2;
1656 tree vtabletmp3;
1657
1658 vtabletmp =
1659 create_tmp_reg (build_pointer_type
1660 (build_pointer_type (vtable_entry_type)), "vptr");
1661
1662 /* The vptr is always at offset zero in the object. */
1663 stmt = gimple_build_assign (vtabletmp,
1664 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1665 ptr));
1666 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1667
1668 /* Form the vtable address. */
1669 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1670 "vtableaddr");
1671 stmt = gimple_build_assign (vtabletmp2,
1672 build_simple_mem_ref (vtabletmp));
1673 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1674
1675 /* Find the entry with the vcall offset. */
1676 stmt = gimple_build_assign (vtabletmp2,
1677 fold_build_pointer_plus_loc (input_location,
1678 vtabletmp2,
1679 virtual_offset));
1680 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1681
1682 /* Get the offset itself. */
1683 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1684 "vcalloffset");
1685 stmt = gimple_build_assign (vtabletmp3,
1686 build_simple_mem_ref (vtabletmp2));
1687 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1688
1689 /* Adjust the `this' pointer. */
1690 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1691 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1692 GSI_CONTINUE_LINKING);
1693 }
1694
1695 /* Likewise for an offset that is stored in the object that contains the
1696 vtable. */
1697 if (indirect_offset != 0)
1698 {
1699 tree offset_ptr, offset_tree;
1700
1701 /* Get the address of the offset. */
1702 offset_ptr
1703 = create_tmp_reg (build_pointer_type
1704 (build_pointer_type (vtable_entry_type)),
1705 "offset_ptr");
1706 stmt = gimple_build_assign (offset_ptr,
1707 build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
1708 ptr));
1709 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1710
1711 stmt = gimple_build_assign
1712 (offset_ptr,
1713 fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
1714 indirect_offset));
1715 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1716
1717 /* Get the offset itself. */
1718 offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
1719 "offset");
1720 stmt = gimple_build_assign (offset_tree,
1721 build_simple_mem_ref (offset_ptr));
1722 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1723
1724 /* Adjust the `this' pointer. */
1725 ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
1726 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1727 GSI_CONTINUE_LINKING);
1728 }
1729
1730 if (!this_adjusting
1731 && fixed_offset != 0)
1732 /* Adjust the pointer by the constant. */
1733 {
1734 tree ptrtmp;
1735
1736 if (VAR_P (ptr))
1737 ptrtmp = ptr;
1738 else
1739 {
1740 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1741 stmt = gimple_build_assign (ptrtmp, ptr);
1742 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1743 }
1744 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1745 ptrtmp, fixed_offset);
1746 }
1747
1748 /* Emit the statement and gimplify the adjustment expression. */
1749 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1750 stmt = gimple_build_assign (ret, ptr);
1751 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1752
1753 return ret;
1754 }
1755
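/* Illustrative note (not original GCC commentary): a this-adjusting thunk
   typically arises for multiple inheritance, e.g.

     struct A { virtual void f (); int a; };
     struct B { virtual void g (); int b; };
     struct C : A, B { void g (); };

   Calling g through a B* that actually refers to a C object dispatches to a
   thunk that adjusts `this' from the B subobject back to the enclosing C
   object before transferring control to C::g; that constant delta is the
   FIXED_OFFSET handled above.  Virtual bases additionally need
   VIRTUAL_OFFSET, which is loaded from the vtable at run time.  */
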
1756 /* Expand thunk NODE to gimple if possible.
1757 When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1758 no assembler is produced.
1759 When OUTPUT_ASM_THUNKS is true, also produce assembler for
1760 thunks that are not lowered. */
1761
1762 bool
1763 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1764 {
1765 bool this_adjusting = thunk.this_adjusting;
1766 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1767 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1768 HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1769 tree virtual_offset = NULL;
1770 tree alias = callees->callee->decl;
1771 tree thunk_fndecl = decl;
1772 tree a;
1773
1774 /* An instrumentation thunk is the same function with
1775 a different signature; it never needs to be expanded. */
1776 if (thunk.add_pointer_bounds_args)
1777 return false;
1778
1779 if (!force_gimple_thunk
1780 && this_adjusting
1781 && indirect_offset == 0
1782 && !DECL_EXTERNAL (alias)
1783 && !DECL_STATIC_CHAIN (alias)
1784 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1785 virtual_value, alias))
1786 {
1787 const char *fnname;
1788 tree fn_block;
1789 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1790
1791 if (!output_asm_thunks)
1792 {
1793 analyzed = true;
1794 return false;
1795 }
1796
1797 if (in_lto_p)
1798 get_untransformed_body ();
1799 a = DECL_ARGUMENTS (thunk_fndecl);
1800
1801 current_function_decl = thunk_fndecl;
1802
1803 /* Ensure thunks are emitted in their correct sections. */
1804 resolve_unique_section (thunk_fndecl, 0,
1805 flag_function_sections);
1806
1807 DECL_RESULT (thunk_fndecl)
1808 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1809 RESULT_DECL, 0, restype);
1810 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1811 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1812
1813 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1814 create one. */
1815 fn_block = make_node (BLOCK);
1816 BLOCK_VARS (fn_block) = a;
1817 DECL_INITIAL (thunk_fndecl) = fn_block;
1818 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1819 allocate_struct_function (thunk_fndecl, false);
1820 init_function_start (thunk_fndecl);
1821 cfun->is_thunk = 1;
1822 insn_locations_init ();
1823 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1824 prologue_location = curr_insn_location ();
1825 assemble_start_function (thunk_fndecl, fnname);
1826
1827 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1828 fixed_offset, virtual_value, alias);
1829
1830 assemble_end_function (thunk_fndecl, fnname);
1831 insn_locations_finalize ();
1832 init_insn_lengths ();
1833 free_after_compilation (cfun);
1834 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1835 thunk.thunk_p = false;
1836 analyzed = false;
1837 }
1838 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1839 {
1840 error ("generic thunk code fails for method %qD which uses %<...%>",
1841 thunk_fndecl);
1842 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1843 analyzed = true;
1844 return false;
1845 }
1846 else
1847 {
1848 tree restype;
1849 basic_block bb, then_bb, else_bb, return_bb;
1850 gimple_stmt_iterator bsi;
1851 int nargs = 0;
1852 tree arg;
1853 int i;
1854 tree resdecl;
1855 tree restmp = NULL;
1856
1857 gcall *call;
1858 greturn *ret;
1859 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1860
1861 /* We may be called from create_wrapper, which releases the body except
1862 for DECL_ARGUMENTS. In that case force_gimple_thunk is true. */
1863 if (in_lto_p && !force_gimple_thunk)
1864 get_untransformed_body ();
1865 a = DECL_ARGUMENTS (thunk_fndecl);
1866
1867 current_function_decl = thunk_fndecl;
1868
1869 /* Ensure thunks are emitted in their correct sections. */
1870 resolve_unique_section (thunk_fndecl, 0,
1871 flag_function_sections);
1872
1873 DECL_IGNORED_P (thunk_fndecl) = 1;
1874 bitmap_obstack_initialize (NULL);
1875
1876 if (thunk.virtual_offset_p)
1877 virtual_offset = size_int (virtual_value);
1878
1879 /* Build the return declaration for the function. */
1880 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1881 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1882 {
1883 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1884 DECL_ARTIFICIAL (resdecl) = 1;
1885 DECL_IGNORED_P (resdecl) = 1;
1886 DECL_CONTEXT (resdecl) = thunk_fndecl;
1887 DECL_RESULT (thunk_fndecl) = resdecl;
1888 }
1889 else
1890 resdecl = DECL_RESULT (thunk_fndecl);
1891
1892 profile_count cfg_count = count;
1893 if (!cfg_count.initialized_p ())
1894 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
1895
1896 bb = then_bb = else_bb = return_bb
1897 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
1898
1899 bsi = gsi_start_bb (bb);
1900
1901 /* Build call to the function being thunked. */
1902 if (!VOID_TYPE_P (restype)
1903 && (!alias_is_noreturn
1904 || TREE_ADDRESSABLE (restype)
1905 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1906 {
1907 if (DECL_BY_REFERENCE (resdecl))
1908 {
1909 restmp = gimple_fold_indirect_ref (resdecl);
1910 if (!restmp)
1911 restmp = build2 (MEM_REF,
1912 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1913 resdecl,
1914 build_int_cst (TREE_TYPE
1915 (DECL_RESULT (alias)), 0));
1916 }
1917 else if (!is_gimple_reg_type (restype))
1918 {
1919 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1920 {
1921 restmp = resdecl;
1922
1923 if (VAR_P (restmp))
1924 {
1925 add_local_decl (cfun, restmp);
1926 BLOCK_VARS (DECL_INITIAL (current_function_decl))
1927 = restmp;
1928 }
1929 }
1930 else
1931 restmp = create_tmp_var (restype, "retval");
1932 }
1933 else
1934 restmp = create_tmp_reg (restype, "retval");
1935 }
1936
1937 for (arg = a; arg; arg = DECL_CHAIN (arg))
1938 nargs++;
1939 auto_vec<tree> vargs (nargs);
1940 i = 0;
1941 arg = a;
1942 if (this_adjusting)
1943 {
1944 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1945 virtual_offset, indirect_offset));
1946 arg = DECL_CHAIN (a);
1947 i = 1;
1948 }
1949
1950 if (nargs)
1951 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1952 {
1953 tree tmp = arg;
1954 if (VECTOR_TYPE_P (TREE_TYPE (arg))
1955 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1956 DECL_GIMPLE_REG_P (arg) = 1;
1957
1958 if (!is_gimple_val (arg))
1959 {
1960 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1961 (TREE_TYPE (arg)), "arg");
1962 gimple *stmt = gimple_build_assign (tmp, arg);
1963 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1964 }
1965 vargs.quick_push (tmp);
1966 }
1967 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1968 callees->call_stmt = call;
1969 gimple_call_set_from_thunk (call, true);
1970 if (DECL_STATIC_CHAIN (alias))
1971 {
1972 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
1973 tree type = TREE_TYPE (p);
1974 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1975 PARM_DECL, create_tmp_var_name ("CHAIN"),
1976 type);
1977 DECL_ARTIFICIAL (decl) = 1;
1978 DECL_IGNORED_P (decl) = 1;
1979 TREE_USED (decl) = 1;
1980 DECL_CONTEXT (decl) = thunk_fndecl;
1981 DECL_ARG_TYPE (decl) = type;
1982 TREE_READONLY (decl) = 1;
1983
1984 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
1985 sf->static_chain_decl = decl;
1986
1987 gimple_call_set_chain (call, decl);
1988 }
1989
1990 /* Return slot optimization is always possible and in fact required to
1991 return values with DECL_BY_REFERENCE. */
1992 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1993 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1994 || DECL_BY_REFERENCE (resdecl)))
1995 gimple_call_set_return_slot_opt (call, true);
1996
1997 if (restmp)
1998 {
1999 gimple_call_set_lhs (call, restmp);
2000 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2001 TREE_TYPE (TREE_TYPE (alias))));
2002 }
2003 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2004 if (!alias_is_noreturn)
2005 {
2006 if (restmp && !this_adjusting
2007 && (fixed_offset || virtual_offset))
2008 {
2009 tree true_label = NULL_TREE;
2010
2011 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2012 {
2013 gimple *stmt;
2014 edge e;
2015 /* If the return type is a pointer, we need to
2016 protect against NULL. We know there will be an
2017 adjustment, because that's why we're emitting a
2018 thunk. */
2019 then_bb = create_basic_block (NULL, bb);
2020 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2021 return_bb = create_basic_block (NULL, then_bb);
2022 return_bb->count = cfg_count;
2023 else_bb = create_basic_block (NULL, else_bb);
2024 else_bb->count = cfg_count.apply_scale (1, 16);
2025 add_bb_to_loop (then_bb, bb->loop_father);
2026 add_bb_to_loop (return_bb, bb->loop_father);
2027 add_bb_to_loop (else_bb, bb->loop_father);
2028 remove_edge (single_succ_edge (bb));
2029 true_label = gimple_block_label (then_bb);
2030 stmt = gimple_build_cond (NE_EXPR, restmp,
2031 build_zero_cst (TREE_TYPE (restmp)),
2032 NULL_TREE, NULL_TREE);
2033 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2034 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2035 e->probability = profile_probability::guessed_always ()
2036 .apply_scale (1, 16);
2037 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2038 e->probability = profile_probability::guessed_always ()
2039 .apply_scale (1, 16);
2040 make_single_succ_edge (return_bb,
2041 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2042 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2043 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2044 e->probability = profile_probability::always ();
2045 bsi = gsi_last_bb (then_bb);
2046 }
2047
2048 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2049 fixed_offset, virtual_offset,
2050 indirect_offset);
2051 if (true_label)
2052 {
2053 gimple *stmt;
2054 bsi = gsi_last_bb (else_bb);
2055 stmt = gimple_build_assign (restmp,
2056 build_zero_cst (TREE_TYPE (restmp)));
2057 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2058 bsi = gsi_last_bb (return_bb);
2059 }
2060 }
2061 else
2062 gimple_call_set_tail (call, true);
2063
2064 /* Build return value. */
2065 if (!DECL_BY_REFERENCE (resdecl))
2066 ret = gimple_build_return (restmp);
2067 else
2068 ret = gimple_build_return (resdecl);
2069
2070 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2071 }
2072 else
2073 {
2074 gimple_call_set_tail (call, true);
2075 remove_edge (single_succ_edge (bb));
2076 }
2077
2078 cfun->gimple_df->in_ssa_p = true;
2079 update_max_bb_count ();
2080 profile_status_for_fn (cfun)
2081 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2082 ? PROFILE_READ : PROFILE_GUESSED;
2083 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2084 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2085 delete_unreachable_blocks ();
2086 update_ssa (TODO_update_ssa);
2087 checking_verify_flow_info ();
2088 free_dominance_info (CDI_DOMINATORS);
2089
2090 /* Since we want to emit the thunk, we explicitly mark its name as
2091 referenced. */
2092 thunk.thunk_p = false;
2093 lowered = true;
2094 bitmap_obstack_release (NULL);
2095 }
2096 current_function_decl = NULL;
2097 set_cfun (NULL);
2098 return true;
2099 }
2100
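/* Illustrative sketch (simplified, assumed shape) of the GIMPLE body the
   generic path above builds for a this-adjusting thunk:

     thunk (this, args...)
     {
       adjusted_this = this + fixed_offset;       <- thunk_adjust
       retval = target (adjusted_this, args...);
       return retval;
     }

   Result-adjusting (covariant return) thunks instead call the target with
   the original arguments and then adjust the returned pointer, guarding
   against a NULL result as shown above.  */
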
2101 /* Assemble the thunks and aliases associated with this node. */
2102
2103 void
2104 cgraph_node::assemble_thunks_and_aliases (void)
2105 {
2106 cgraph_edge *e;
2107 ipa_ref *ref;
2108
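  /* Thunks are represented as callers of the node they forward to, so
     walking the caller edges below finds every thunk wrapping this node.  */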
2109 for (e = callers; e;)
2110 if (e->caller->thunk.thunk_p
2111 && !e->caller->global.inlined_to
2112 && !e->caller->thunk.add_pointer_bounds_args)
2113 {
2114 cgraph_node *thunk = e->caller;
2115
2116 e = e->next_caller;
2117 thunk->expand_thunk (true, false);
2118 thunk->assemble_thunks_and_aliases ();
2119 }
2120 else
2121 e = e->next_caller;
2122
2123 FOR_EACH_ALIAS (this, ref)
2124 {
2125 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2126 if (!alias->transparent_alias)
2127 {
2128 bool saved_written = TREE_ASM_WRITTEN (decl);
2129
2130 /* Force assemble_alias to really output the alias this time instead
2131 of buffering it among the pending alias pairs. */
2132 TREE_ASM_WRITTEN (decl) = 1;
2133 do_assemble_alias (alias->decl,
2134 DECL_ASSEMBLER_NAME (decl));
2135 alias->assemble_thunks_and_aliases ();
2136 TREE_ASM_WRITTEN (decl) = saved_written;
2137 }
2138 }
2139 }
2140
2141 /* Expand function specified by node. */
2142
2143 void
2144 cgraph_node::expand (void)
2145 {
2146 location_t saved_loc;
2147
2148 /* We ought to not compile any inline clones. */
2149 gcc_assert (!global.inlined_to);
2150
2151 /* __RTL functions are compiled as soon as they are parsed, so don't
2152 do it again. */
2153 if (native_rtl_p ())
2154 return;
2155
2156 announce_function (decl);
2157 process = 0;
2158 gcc_assert (lowered);
2159 get_untransformed_body ();
2160
2161 /* Generate RTL for the body of DECL. */
2162
2163 timevar_push (TV_REST_OF_COMPILATION);
2164
2165 gcc_assert (symtab->global_info_ready);
2166
2167 /* Initialize the default bitmap obstack. */
2168 bitmap_obstack_initialize (NULL);
2169
2170 /* Initialize the RTL code for the function. */
2171 saved_loc = input_location;
2172 input_location = DECL_SOURCE_LOCATION (decl);
2173
2174 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2175 push_cfun (DECL_STRUCT_FUNCTION (decl));
2176 init_function_start (decl);
2177
2178 gimple_register_cfg_hooks ();
2179
2180 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2181
2182 execute_all_ipa_transforms ();
2183
2184 /* Perform all tree transforms and optimizations. */
2185
2186 /* Signal the start of passes. */
2187 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2188
2189 execute_pass_list (cfun, g->get_passes ()->all_passes);
2190
2191 /* Signal the end of passes. */
2192 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2193
2194 bitmap_obstack_release (&reg_obstack);
2195
2196 /* Release the default bitmap obstack. */
2197 bitmap_obstack_release (NULL);
2198
2199 /* If requested, warn about function definitions where the function will
2200 return a value (usually of some struct or union type) which itself will
2201 take up a lot of stack space. */
2202 if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2203 {
2204 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2205
2206 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2207 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2208 && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2209 warn_larger_than_size) > 0)
2210 {
2211 unsigned int size_as_int
2212 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2213
2214 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2215 warning (OPT_Wlarger_than_,
2216 "size of return value of %q+D is %u bytes",
2217 decl, size_as_int);
2218 else
2219 warning (OPT_Wlarger_than_,
2220 "size of return value of %q+D is larger than %wu bytes",
2221 decl, warn_larger_than_size);
2222 }
2223 }
2224
2225 gimple_set_body (decl, NULL);
2226 if (DECL_STRUCT_FUNCTION (decl) == 0
2227 && !cgraph_node::get (decl)->origin)
2228 {
2229 /* Stop pointing to the local nodes about to be freed.
2230 But DECL_INITIAL must remain nonzero so we know this
2231 was an actual function definition.
2232 For a nested function, this is done in c_pop_function_context.
2233 If rest_of_compilation set this to 0, leave it 0. */
2234 if (DECL_INITIAL (decl) != 0)
2235 DECL_INITIAL (decl) = error_mark_node;
2236 }
2237
2238 input_location = saved_loc;
2239
2240 ggc_collect ();
2241 timevar_pop (TV_REST_OF_COMPILATION);
2242
2243 /* Make sure that the back end didn't give up on compiling. */
2244 gcc_assert (TREE_ASM_WRITTEN (decl));
2245 if (cfun)
2246 pop_cfun ();
2247
2248 /* It would make a lot more sense to output thunks before the function body
2249 to get more forward and fewer backward jumps. This however would require
2250 solving a problem with comdats. See PR48668. Also aliases must come after
2251 the function itself to keep one-pass assemblers, like the one on AIX,
2252 happy. See PR 50689. FIXME: Perhaps thunks should be moved before the
2253 function IFF they are not in comdat groups. */
2254 assemble_thunks_and_aliases ();
2255 release_body ();
2256 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2257 points to the dead function body. */
2258 remove_callees ();
2259 remove_all_references ();
2260 }
2261
2262 /* Node comparator responsible for the order that corresponds to the time
2263 when a function was first executed (its time profile). */
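/* Note (illustrative, based on how time profiling is generally used):
   tp_first_run records, from profile feedback, when the function was first
   executed during the training run; zero means no time-profile information
   is available for the node.  */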
2264
2265 static int
2266 node_cmp (const void *pa, const void *pb)
2267 {
2268 const cgraph_node *a = *(const cgraph_node * const *) pa;
2269 const cgraph_node *b = *(const cgraph_node * const *) pb;
2270
2271 /* Functions with a time profile must come before those without one. */
2272 if (!a->tp_first_run || !b->tp_first_run)
2273 return a->tp_first_run - b->tp_first_run;
2274
2275 return a->tp_first_run != b->tp_first_run
2276 ? b->tp_first_run - a->tp_first_run
2277 : b->order - a->order;
2278 }
2279
2280 /* Expand all functions that must be output.
2281
2282 Attempt to topologically sort the nodes so a function is output when
2283 all called functions are already assembled to allow data to be
2284 propagated across the callgraph. Use a stack to get smaller distance
2285 between a function and its callees (later we may choose to use a more
2286 sophisticated algorithm for function reordering; we will likely want
2287 to use subsections to make the output functions appear in top-down
2288 order). */
2289
2290 static void
2291 expand_all_functions (void)
2292 {
2293 cgraph_node *node;
2294 cgraph_node **order = XCNEWVEC (cgraph_node *,
2295 symtab->cgraph_count);
2296 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2297 int order_pos, new_order_pos = 0;
2298 int i;
2299
2300 order_pos = ipa_reverse_postorder (order);
2301 gcc_assert (order_pos == symtab->cgraph_count);
2302
2303 /* The garbage collector may remove inline clones we eliminate during
2304 optimization, so we must be sure not to reference them. */
2305 for (i = 0; i < order_pos; i++)
2306 if (order[i]->process)
2307 order[new_order_pos++] = order[i];
2308
2309 if (flag_profile_reorder_functions)
2310 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2311
2312 for (i = new_order_pos - 1; i >= 0; i--)
2313 {
2314 node = order[i];
2315
2316 if (node->process)
2317 {
2318 expanded_func_count++;
2319 if (node->tp_first_run)
2320 profiled_func_count++;
2321
2322 if (symtab->dump_file)
2323 fprintf (symtab->dump_file,
2324 "Time profile order in expand_all_functions:%s:%d\n",
2325 node->asm_name (), node->tp_first_run);
2326 node->process = 0;
2327 node->expand ();
2328 }
2329 }
2330
2331 if (dump_file)
2332 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2333 main_input_filename, profiled_func_count, expanded_func_count);
2334
2335 if (symtab->dump_file && flag_profile_reorder_functions)
2336 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2337 profiled_func_count, expanded_func_count);
2338
2339 symtab->process_new_functions ();
2340 free_gimplify_stack ();
2341
2342 free (order);
2343 }
2344
2345 /* This is used to sort the nodes by their cgraph order number. */
2346
2347 enum cgraph_order_sort_kind
2348 {
2349 ORDER_UNDEFINED = 0,
2350 ORDER_FUNCTION,
2351 ORDER_VAR,
2352 ORDER_VAR_UNDEF,
2353 ORDER_ASM
2354 };
2355
2356 struct cgraph_order_sort
2357 {
2358 enum cgraph_order_sort_kind kind;
2359 union
2360 {
2361 cgraph_node *f;
2362 varpool_node *v;
2363 asm_node *a;
2364 } u;
2365 };
2366
2367 /* Output all functions, variables, and asm statements in the order
2368 according to their order fields, which is the order in which they
2369 appeared in the file. This implements -fno-toplevel-reorder. In
2370 this mode we may output functions and variables which don't really
2371 need to be output. */
2372
2373 static void
2374 output_in_order (void)
2375 {
2376 int max;
2377 cgraph_order_sort *nodes;
2378 int i;
2379 cgraph_node *pf;
2380 varpool_node *pv;
2381 asm_node *pa;
2382 max = symtab->order;
2383 nodes = XCNEWVEC (cgraph_order_sort, max);
2384
2385 FOR_EACH_DEFINED_FUNCTION (pf)
2386 {
2387 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2388 {
2389 if (!pf->no_reorder)
2390 continue;
2391 i = pf->order;
2392 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2393 nodes[i].kind = ORDER_FUNCTION;
2394 nodes[i].u.f = pf;
2395 }
2396 }
2397
2398 /* There is a similar loop in symbol_table::output_variables.
2399 Please keep them in sync. */
2400 FOR_EACH_VARIABLE (pv)
2401 {
2402 if (!pv->no_reorder)
2403 continue;
2404 if (DECL_HARD_REGISTER (pv->decl)
2405 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2406 continue;
2407 i = pv->order;
2408 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2409 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2410 nodes[i].u.v = pv;
2411 }
2412
2413 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2414 {
2415 i = pa->order;
2416 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2417 nodes[i].kind = ORDER_ASM;
2418 nodes[i].u.a = pa;
2419 }
2420
2421 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
2422
2423 for (i = 0; i < max; ++i)
2424 if (nodes[i].kind == ORDER_VAR)
2425 nodes[i].u.v->finalize_named_section_flags ();
2426
2427 for (i = 0; i < max; ++i)
2428 {
2429 switch (nodes[i].kind)
2430 {
2431 case ORDER_FUNCTION:
2432 nodes[i].u.f->process = 0;
2433 nodes[i].u.f->expand ();
2434 break;
2435
2436 case ORDER_VAR:
2437 nodes[i].u.v->assemble_decl ();
2438 break;
2439
2440 case ORDER_VAR_UNDEF:
2441 assemble_undefined_decl (nodes[i].u.v->decl);
2442 break;
2443
2444 case ORDER_ASM:
2445 assemble_asm (nodes[i].u.a->asm_str);
2446 break;
2447
2448 case ORDER_UNDEFINED:
2449 break;
2450
2451 default:
2452 gcc_unreachable ();
2453 }
2454 }
2455
2456 symtab->clear_asm_symbols ();
2457
2458 free (nodes);
2459 }
2460
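/* Illustrative example (simplified): with -fno-toplevel-reorder a unit such
   as

     int x = 1;
     asm ("# toplevel marker");
     int f (void) { return x; }

   is emitted with the variable, the toplevel asm and the function in this
   same source order, using the order fields recorded when the symbols were
   finalized.  */
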
2461 static void
2462 ipa_passes (void)
2463 {
2464 gcc::pass_manager *passes = g->get_passes ();
2465
2466 set_cfun (NULL);
2467 current_function_decl = NULL;
2468 gimple_register_cfg_hooks ();
2469 bitmap_obstack_initialize (NULL);
2470
2471 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2472
2473 if (!in_lto_p)
2474 {
2475 execute_ipa_pass_list (passes->all_small_ipa_passes);
2476 if (seen_error ())
2477 return;
2478 }
2479
2480 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2481 devirtualization and other changes for which removal needs to iterate. */
2482 symtab->remove_unreachable_nodes (symtab->dump_file);
2483
2484 /* If pass_all_early_optimizations was not scheduled, the state of
2485 the cgraph will not be properly updated. Update it now. */
2486 if (symtab->state < IPA_SSA)
2487 symtab->state = IPA_SSA;
2488
2489 if (!in_lto_p)
2490 {
2491 /* Generate coverage variables and constructors. */
2492 coverage_finish ();
2493
2494 /* Process new functions added. */
2495 set_cfun (NULL);
2496 current_function_decl = NULL;
2497 symtab->process_new_functions ();
2498
2499 execute_ipa_summary_passes
2500 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2501 }
2502
2503 /* Some targets need to handle LTO assembler output specially. */
2504 if (flag_generate_lto || flag_generate_offload)
2505 targetm.asm_out.lto_start ();
2506
2507 if (!in_lto_p
2508 || flag_incremental_link == INCREMENTAL_LINK_LTO)
2509 {
2510 if (!quiet_flag)
2511 fprintf (stderr, "Streaming LTO\n");
2512 if (g->have_offload)
2513 {
2514 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2515 lto_stream_offload_p = true;
2516 ipa_write_summaries ();
2517 lto_stream_offload_p = false;
2518 }
2519 if (flag_lto)
2520 {
2521 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2522 lto_stream_offload_p = false;
2523 ipa_write_summaries ();
2524 }
2525 }
2526
2527 if (flag_generate_lto || flag_generate_offload)
2528 targetm.asm_out.lto_end ();
2529
2530 if (!flag_ltrans
2531 && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
2532 || !flag_lto || flag_fat_lto_objects))
2533 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2534 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2535
2536 bitmap_obstack_release (NULL);
2537 }
2538
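/* Illustrative note (simplified): with -flto and without -ffat-lto-objects,
   ipa_passes above only streams the IPA summaries into the LTO sections;
   the regular IPA pass list runs later, at link time.  With
   -ffat-lto-objects (or without -flto) the regular IPA passes also run
   here.  */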
2539
2540 /* Return, as an identifier, the name of the symbol DECL is an alias of. */
2541
2542 static tree
2543 get_alias_symbol (tree decl)
2544 {
2545 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2546 return get_identifier (TREE_STRING_POINTER
2547 (TREE_VALUE (TREE_VALUE (alias))));
2548 }
2549
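/* For example (illustrative only), given

     void target (void) { }
     void f (void) __attribute__ ((alias ("target")));

   the attribute's string argument is "target" and get_alias_symbol returns
   the corresponding identifier.  */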
2550
2551 /* Weakrefs may be associated with external decls and thus are not output
2552 at expansion time. Emit all necessary aliases. */
2553
2554 void
2555 symbol_table::output_weakrefs (void)
2556 {
2557 symtab_node *node;
2558 FOR_EACH_SYMBOL (node)
2559 if (node->alias
2560 && !TREE_ASM_WRITTEN (node->decl)
2561 && node->weakref)
2562 {
2563 tree target;
2564
2565 /* Weakrefs are special in not requiring the target to be defined in the
2566 current compilation unit. It is thus a bit hard to work out what we
2567 want to alias.
2568 When the alias target is defined, we need to fetch it from the symtab
2569 reference; otherwise it is pointed to by alias_target. */
2570 if (node->alias_target)
2571 target = (DECL_P (node->alias_target)
2572 ? DECL_ASSEMBLER_NAME (node->alias_target)
2573 : node->alias_target);
2574 else if (node->analyzed)
2575 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2576 else
2577 {
2578 gcc_unreachable ();
2579 target = get_alias_symbol (node->decl);
2580 }
2581 do_assemble_alias (node->decl, target);
2582 }
2583 }
2584
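/* Illustrative example: a weakref whose target may never be defined in
   this unit,

     static void local_f (void) __attribute__ ((weakref ("some_target")));

   still gets its alias directive emitted here, referring to the (possibly
   undefined) target symbol.  */
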
2585 /* Perform simple optimizations based on callgraph. */
2586
2587 void
2588 symbol_table::compile (void)
2589 {
2590 if (seen_error ())
2591 return;
2592
2593 symtab_node::checking_verify_symtab_nodes ();
2594
2595 timevar_push (TV_CGRAPHOPT);
2596 if (pre_ipa_mem_report)
2597 {
2598 fprintf (stderr, "Memory consumption before IPA\n");
2599 dump_memory_report (false);
2600 }
2601 if (!quiet_flag)
2602 fprintf (stderr, "Performing interprocedural optimizations\n");
2603 state = IPA;
2604
2605 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2606 if (flag_generate_lto || flag_generate_offload)
2607 lto_streamer_hooks_init ();
2608
2609 /* Don't run the IPA passes if there were any error or sorry messages. */
2610 if (!seen_error ())
2611 ipa_passes ();
2612
2613 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2614 if (seen_error ()
2615 || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
2616 && flag_lto && !flag_fat_lto_objects))
2617 {
2618 timevar_pop (TV_CGRAPHOPT);
2619 return;
2620 }
2621
2622 global_info_ready = true;
2623 if (dump_file)
2624 {
2625 fprintf (dump_file, "Optimized ");
2626 symtab->dump (dump_file);
2627 }
2628 if (post_ipa_mem_report)
2629 {
2630 fprintf (stderr, "Memory consumption after IPA\n");
2631 dump_memory_report (false);
2632 }
2633 timevar_pop (TV_CGRAPHOPT);
2634
2635 /* Output everything. */
2636 switch_to_section (text_section);
2637 (*debug_hooks->assembly_start) ();
2638 if (!quiet_flag)
2639 fprintf (stderr, "Assembling functions:\n");
2640 symtab_node::checking_verify_symtab_nodes ();
2641
2642 bitmap_obstack_initialize (NULL);
2643 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2644 bitmap_obstack_release (NULL);
2645 mark_functions_to_output ();
2646
2647 /* When weakref support is missing, we automatically translate all
2648 references to NODE to references to its ultimate alias target.
2649 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2650 TREE_CHAIN.
2651
2652 Set up this mapping before we output any assembler but once we are sure
2653 that all symbol renaming is done.
2654
2655 FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2656 level by physically rewriting the IL. At the moment we can only redirect
2657 calls, so we need infrastructure for renaming references as well. */
2658 #ifndef ASM_OUTPUT_WEAKREF
2659 symtab_node *node;
2660
2661 FOR_EACH_SYMBOL (node)
2662 if (node->alias
2663 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2664 {
2665 IDENTIFIER_TRANSPARENT_ALIAS
2666 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2667 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2668 = (node->alias_target ? node->alias_target
2669 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2670 }
2671 #endif
2672
2673 state = EXPANSION;
2674
2675 /* First output asm statements and anything that must keep its source order.
2676 The process flag is cleared for these nodes, so we skip them later. */
2677 output_in_order ();
2678 expand_all_functions ();
2679 output_variables ();
2680
2681 process_new_functions ();
2682 state = FINISHED;
2683 output_weakrefs ();
2684
2685 if (dump_file)
2686 {
2687 fprintf (dump_file, "\nFinal ");
2688 symtab->dump (dump_file);
2689 }
2690 if (!flag_checking)
2691 return;
2692 symtab_node::verify_symtab_nodes ();
2693 /* Double check that all inline clones are gone and that all
2694 function bodies have been released from memory. */
2695 if (!seen_error ())
2696 {
2697 cgraph_node *node;
2698 bool error_found = false;
2699
2700 FOR_EACH_DEFINED_FUNCTION (node)
2701 if (node->global.inlined_to
2702 || gimple_has_body_p (node->decl))
2703 {
2704 error_found = true;
2705 node->debug ();
2706 }
2707 if (error_found)
2708 internal_error ("nodes with unreleased memory found");
2709 }
2710 }
2711
2712 /* Earlydebug dump file, flags, and number. */
2713
2714 static int debuginfo_early_dump_nr;
2715 static FILE *debuginfo_early_dump_file;
2716 static dump_flags_t debuginfo_early_dump_flags;
2717
2718 /* Debug dump file, flags, and number. */
2719
2720 static int debuginfo_dump_nr;
2721 static FILE *debuginfo_dump_file;
2722 static dump_flags_t debuginfo_dump_flags;
2723
2724 /* Register the debug and earlydebug dump files. */
2725
2726 void
2727 debuginfo_early_init (void)
2728 {
2729 gcc::dump_manager *dumps = g->get_dumps ();
2730 debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2731 "earlydebug", DK_tree,
2732 OPTGROUP_NONE,
2733 false);
2734 debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2735 "debug", DK_tree,
2736 OPTGROUP_NONE,
2737 false);
2738 }
2739
2740 /* Initialize the debug and earlydebug dump files. */
2741
2742 void
2743 debuginfo_init (void)
2744 {
2745 gcc::dump_manager *dumps = g->get_dumps ();
2746 debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2747 debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2748 debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2749 debuginfo_early_dump_flags
2750 = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2751 }
2752
2753 /* Finalize the debug and earlydebug dump files. */
2754
2755 void
2756 debuginfo_fini (void)
2757 {
2758 if (debuginfo_dump_file)
2759 dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2760 if (debuginfo_early_dump_file)
2761 dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2762 }
2763
2764 /* Set dump_file to the debug dump file. */
2765
2766 void
2767 debuginfo_start (void)
2768 {
2769 set_dump_file (debuginfo_dump_file);
2770 }
2771
2772 /* Undo setting dump_file to the debug dump file. */
2773
2774 void
2775 debuginfo_stop (void)
2776 {
2777 set_dump_file (NULL);
2778 }
2779
2780 /* Set dump_file to the earlydebug dump file. */
2781
2782 void
2783 debuginfo_early_start (void)
2784 {
2785 set_dump_file (debuginfo_early_dump_file);
2786 }
2787
2788 /* Undo setting dump_file to the earlydebug dump file. */
2789
2790 void
2791 debuginfo_early_stop (void)
2792 {
2793 set_dump_file (NULL);
2794 }
2795
2796 /* Analyze the whole compilation unit once it is parsed completely. */
2797
2798 void
2799 symbol_table::finalize_compilation_unit (void)
2800 {
2801 timevar_push (TV_CGRAPH);
2802
2803 /* If we're here there's no current function anymore. Some frontends
2804 are lazy in clearing these. */
2805 current_function_decl = NULL;
2806 set_cfun (NULL);
2807
2808 /* Do not skip analyzing the functions if there were errors; we would
2809 otherwise miss diagnostics for the following functions. */
2810
2811 /* Emit size functions we didn't inline. */
2812 finalize_size_functions ();
2813
2814 /* Mark alias targets necessary and emit diagnostics. */
2815 handle_alias_pairs ();
2816
2817 if (!quiet_flag)
2818 {
2819 fprintf (stderr, "\nAnalyzing compilation unit\n");
2820 fflush (stderr);
2821 }
2822
2823 if (flag_dump_passes)
2824 dump_passes ();
2825
2826 /* Gimplify and lower all functions, compute reachability and
2827 remove unreachable nodes. */
2828 analyze_functions (/*first_time=*/true);
2829
2830 /* Mark alias targets necessary and emit diagnostics. */
2831 handle_alias_pairs ();
2832
2833 /* Gimplify and lower thunks. */
2834 analyze_functions (/*first_time=*/false);
2835
2836 /* Offloading requires LTO infrastructure. */
2837 if (!in_lto_p && g->have_offload)
2838 flag_generate_offload = 1;
2839
2840 if (!seen_error ())
2841 {
2842 /* Emit early debug for reachable functions, and by consequence,
2843 locally scoped symbols. */
2844 struct cgraph_node *cnode;
2845 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2846 (*debug_hooks->early_global_decl) (cnode->decl);
2847
2848 /* Clean up anything that needs cleaning up after initial debug
2849 generation. */
2850 debuginfo_early_start ();
2851 (*debug_hooks->early_finish) (main_input_filename);
2852 debuginfo_early_stop ();
2853 }
2854
2855 /* Finally drive the pass manager. */
2856 compile ();
2857
2858 timevar_pop (TV_CGRAPH);
2859 }
2860
2861 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2862 within the same process. For use by toplev::finalize. */
2863
2864 void
2865 cgraphunit_c_finalize (void)
2866 {
2867 gcc_assert (cgraph_new_nodes.length () == 0);
2868 cgraph_new_nodes.truncate (0);
2869
2870 vtable_entry_type = NULL;
2871 queued_nodes = &symtab_terminator;
2872
2873 first_analyzed = NULL;
2874 first_analyzed_var = NULL;
2875 }
2876
2877 /* Create a wrapper from this cgraph_node to the TARGET node. A thunk is
2878 used to implement this kind of wrapper. */
2879
2880 void
2881 cgraph_node::create_wrapper (cgraph_node *target)
2882 {
2883 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2884 tree decl_result = DECL_RESULT (decl);
2885
2886 /* Remove the function's body but keep arguments to be reused
2887 for thunk. */
2888 release_body (true);
2889 reset ();
2890
2891 DECL_UNINLINABLE (decl) = false;
2892 DECL_RESULT (decl) = decl_result;
2893 DECL_INITIAL (decl) = NULL;
2894 allocate_struct_function (decl, false);
2895 set_cfun (NULL);
2896
2897 /* Turn the alias into a thunk and expand it into its GIMPLE representation. */
2898 definition = true;
2899
2900 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2901 thunk.thunk_p = true;
2902 create_edge (target, NULL, count);
2903 callees->can_throw_external = !TREE_NOTHROW (target->decl);
2904
2905 tree arguments = DECL_ARGUMENTS (decl);
2906
2907 while (arguments)
2908 {
2909 TREE_ADDRESSABLE (arguments) = false;
2910 arguments = TREE_CHAIN (arguments);
2911 }
2912
2913 expand_thunk (false, true);
2914
2915 /* Inline summary set-up. */
2916 analyze ();
2917 inline_analyze_function (this);
2918 }
2919
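/* Illustrative note: create_wrapper is used, for instance, by identical
   code folding (IPA ICF), which can turn one of two semantically identical
   functions into a thunk-like wrapper that simply calls the surviving
   copy.  */
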
2920 #include "gt-cgraphunit.h"