gcc/cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements the main driver of the compilation process.
22
23 The main purpose of this file is to act as an interface between the
24 tree-based front ends and the back end.
25
26 The front end is supposed to use the following functionality:
27
28 - finalize_function
29
30 This function is called once the front end has parsed the whole body of a
31 function and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception, needed for implementing GCC extern inline
34 functions.)
35
36 - varpool_finalize_decl
37
38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Inserts a new top-level ASM statement.
44
45 - finalize_compilation_unit
46
47 This function is called once the (source-level) compilation unit is
48 finalized and will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back end. Optimizations are performed and
63 the final assembly is generated. This is done in the following way. Note
64 that with link-time optimization the process is split into three
65 stages (compile time, link-time analysis and parallel link-time
66 compilation, as indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done, e.g. to discover
81 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across the whole program
94 at link time. Their analysis stage is performed early, to
95 reduce both linking time and link-time memory usage by
96 not having to represent the whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile-time and/or link-time analysis stage (WPA):
102
103 At link time the units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile-time and/or parallel link-time stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies, by creating virtual clones in
124 the callgraph. At this point the virtual clones are
125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on the earlier
129 decisions of the IP propagation stage.
130
131 4) late small IP passes
132
133 Simple IP passes working within a single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that need to be output into the
139 assembly are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out, and thus all remaining variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from the
148 back end.
149 - cgraph_add_new_function is used to add backend-produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of a function (a new version)
157 and applies simple transformations to it.
158 */
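/* An illustrative sketch (not part of this file) of how a front end is
   expected to drive the interface described above.  Where FNDECL, VARDECL
   and ASM_STR come from is hypothetical; the calls are the entry points
   listed in the comment (add_asm_node is assumed here to correspond to
   symtab->finalize_toplevel_asm in the current API):

     static void
     frontend_finish_unit (tree fndecl, tree vardecl, tree asm_str)
     {
       // The body of FNDECL is fully parsed and will not change anymore.
       cgraph_node::finalize_function (fndecl, false);

       // Likewise for a static variable.
       varpool_node::finalize_decl (vardecl);

       // A toplevel asm statement, if any.
       symtab->finalize_toplevel_asm (asm_str);

       // The (source level) compilation unit is complete; this analyzes
       // reachable symbols and eventually hands control to the back end.
       symtab->finalize_compilation_unit ();
     }  */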
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208
209 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
210 secondary queue used during optimization to accommodate passes that
211 may generate new functions that need to be optimized and expanded. */
212 vec<cgraph_node *> cgraph_new_nodes;
213
214 static void expand_all_functions (void);
215 static void mark_functions_to_output (void);
216 static void handle_alias_pairs (void);
217
218 /* Used for vtable lookup in thunk adjusting. */
219 static GTY (()) tree vtable_entry_type;
220
221 /* Return true if this symbol is a function from the C frontend specified
222 directly in RTL form (with "__RTL"). */
223
224 bool
225 symtab_node::native_rtl_p () const
226 {
227 if (TREE_CODE (decl) != FUNCTION_DECL)
228 return false;
229 if (!DECL_STRUCT_FUNCTION (decl))
230 return false;
231 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
232 }
233
234 /* Determine if the symbol declaration is needed. That is, it is visible
235 either to something outside this translation unit or to something magic
236 in the system configury. */
237 bool
238 symtab_node::needed_p (void)
239 {
240 /* Double-check that no one has output the function into the assembly
241 file early. */
242 if (!native_rtl_p ())
243 gcc_checking_assert
244 (!DECL_ASSEMBLER_NAME_SET_P (decl)
245 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
246
247 if (!definition)
248 return false;
249
250 if (DECL_EXTERNAL (decl))
251 return false;
252
253 /* If the user told us it is used, then it must be so. */
254 if (force_output)
255 return true;
256
257 /* ABI forced symbols are needed when they are external. */
258 if (forced_by_abi && TREE_PUBLIC (decl))
259 return true;
260
261 /* Keep constructors, destructors and virtual functions. */
262 if (TREE_CODE (decl) == FUNCTION_DECL
263 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
264 return true;
265
266 /* Externally visible variables must be output. The exception is
267 COMDAT variables that must be output only when they are needed. */
268 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
269 return true;
270
271 return false;
272 }
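/* Illustrative examples (not part of this file; names are made up) of
   symbols this predicate considers trivially needed:

     __attribute__ ((constructor)) static void init_module (void) { }
     int exported_counter;                      // TREE_PUBLIC, not comdat
     static int pinned __attribute__ ((used));  // force_output

   A plain "static" helper is not needed by itself; it is only kept once a
   reference to it is discovered while building the callgraph.  */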
273
274 /* Head and terminator of the queue of nodes to be processed while building
275 callgraph. */
276
277 static symtab_node symtab_terminator;
278 static symtab_node *queued_nodes = &symtab_terminator;
279
280 /* Add NODE to the queue starting at QUEUED_NODES. The queue is linked
281 via AUX pointers and terminated by the symtab_terminator sentinel. */
282
283 static void
284 enqueue_node (symtab_node *node)
285 {
286 if (node->aux)
287 return;
288 gcc_checking_assert (queued_nodes);
289 node->aux = queued_nodes;
290 queued_nodes = node;
291 }
292
293 /* Process CGRAPH_NEW_NODES and perform the actions necessary to add these
294 functions into the callgraph so that they look like ordinary reachable
295 functions inserted into the callgraph already at construction time. */
296
297 void
298 symbol_table::process_new_functions (void)
299 {
300 tree fndecl;
301
302 if (!cgraph_new_nodes.exists ())
303 return;
304
305 handle_alias_pairs ();
306 /* Note that this queue may grow as it is being processed, as the new
307 functions may generate new ones. */
308 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
309 {
310 cgraph_node *node = cgraph_new_nodes[i];
311 fndecl = node->decl;
312 switch (state)
313 {
314 case CONSTRUCTION:
315 /* At construction time we just need to finalize the function and move
316 it into the reachable functions list. */
317
318 cgraph_node::finalize_function (fndecl, false);
319 call_cgraph_insertion_hooks (node);
320 enqueue_node (node);
321 break;
322
323 case IPA:
324 case IPA_SSA:
325 case IPA_SSA_AFTER_INLINING:
326 /* When IPA optimization has already started, do all essential
327 transformations that have already been performed on the whole
328 cgraph but not on this function. */
329
330 gimple_register_cfg_hooks ();
331 if (!node->analyzed)
332 node->analyze ();
333 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
334 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
335 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
336 {
337 bool summaried_computed = ipa_fn_summaries != NULL;
338 g->get_passes ()->execute_early_local_passes ();
339 /* Early passes compute inline parameters to do inlining
340 and splitting. This is redundant for functions added late.
341 Just throw away whatever it did. */
342 if (!summaried_computed)
343 {
344 ipa_free_fn_summary ();
345 ipa_free_size_summary ();
346 }
347 }
348 else if (ipa_fn_summaries != NULL)
349 compute_fn_summary (node, true);
350 free_dominance_info (CDI_POST_DOMINATORS);
351 free_dominance_info (CDI_DOMINATORS);
352 pop_cfun ();
353 call_cgraph_insertion_hooks (node);
354 break;
355
356 case EXPANSION:
357 /* Functions created during expansion shall be compiled
358 directly. */
359 node->process = 0;
360 call_cgraph_insertion_hooks (node);
361 node->expand ();
362 break;
363
364 default:
365 gcc_unreachable ();
366 break;
367 }
368 }
369
370 cgraph_new_nodes.release ();
371 }
372
373 /* As a GCC extension we allow redefinition of the function. The
374 semantics when the two bodies differ are not well defined.
375 We replace the old body with the new body, so in unit-at-a-time mode
376 we always use the new body, while in normal mode we may end up with
377 the old body inlined into some functions and the new body expanded and
378 inlined in others.
379
380 ??? It may make more sense to use one body for inlining and the other
381 body for expanding the function, but this is difficult to do. */
382
383 void
384 cgraph_node::reset (void)
385 {
386 /* If process is set, then we have already begun whole-unit analysis.
387 This is *not* testing for whether we've already emitted the function.
388 That case can be sort-of legitimately seen with real function redefinition
389 errors. I would argue that the front end should never present us with
390 such a case, but don't enforce that for now. */
391 gcc_assert (!process);
392
393 /* Reset our data structures so we can analyze the function again. */
394 inlined_to = NULL;
395 memset (&rtl, 0, sizeof (rtl));
396 analyzed = false;
397 definition = false;
398 alias = false;
399 transparent_alias = false;
400 weakref = false;
401 cpp_implicit_alias = false;
402
403 remove_callees ();
404 remove_all_references ();
405 }
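/* An illustrative example (not part of this file) of the redefinition the
   comment above refers to, using the gnu89 "extern inline" extension
   (e.g. with -std=gnu89 or __attribute__ ((gnu_inline))):

     extern inline int min_int (int a, int b) { return a < b ? a : b; }
     // ... later in the same translation unit ...
     int min_int (int a, int b) { return b < a ? b : a; }

   When the second definition is finalized, the node is reset as above and
   redefined_extern_inline is set, so the new body replaces the old one.  */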
406
407 /* Return true when there are references to the node. INCLUDE_SELF is
408 true if a self reference counts as a reference. */
409
410 bool
411 symtab_node::referred_to_p (bool include_self)
412 {
413 ipa_ref *ref = NULL;
414
415 /* See if there are any references at all. */
416 if (iterate_referring (0, ref))
417 return true;
418 /* For functions check also calls. */
419 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
420 if (cn && cn->callers)
421 {
422 if (include_self)
423 return true;
424 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
425 if (e->caller != this)
426 return true;
427 }
428 return false;
429 }
430
431 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
432 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
433 the garbage collector run at the moment. We would need to either create
434 a new GC context, or just not compile right now. */
435
436 void
437 cgraph_node::finalize_function (tree decl, bool no_collect)
438 {
439 cgraph_node *node = cgraph_node::get_create (decl);
440
441 if (node->definition)
442 {
443 /* Nested functions should only be defined once. */
444 gcc_assert (!DECL_CONTEXT (decl)
445 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
446 node->reset ();
447 node->redefined_extern_inline = true;
448 }
449
450 /* Set definition first before calling notice_global_symbol so that
451 it is available to notice_global_symbol. */
452 node->definition = true;
453 notice_global_symbol (decl);
454 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
455 if (!flag_toplevel_reorder)
456 node->no_reorder = true;
457
458 /* With -fkeep-inline-functions we are keeping all inline functions except
459 for extern inline ones. */
460 if (flag_keep_inline_functions
461 && DECL_DECLARED_INLINE_P (decl)
462 && !DECL_EXTERNAL (decl)
463 && !DECL_DISREGARD_INLINE_LIMITS (decl))
464 node->force_output = 1;
465
466 /* __RTL functions were already output as soon as they were parsed (due
467 to the large amount of global state in the backend).
468 Mark such functions as "force_output" to reflect the fact that they
469 will be in the asm file when considering the symbols they reference.
470 The attempt to output them later on will bail out immediately. */
471 if (node->native_rtl_p ())
472 node->force_output = 1;
473
474 /* When not optimizing, also output the static functions (see
475 PR24561), but don't do so for always_inline functions, functions
476 declared inline and nested functions. These were optimized out
477 in the original implementation and it is unclear whether we want
478 to change the behavior here. */
479 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
480 || node->no_reorder)
481 && !node->cpp_implicit_alias
482 && !DECL_DISREGARD_INLINE_LIMITS (decl)
483 && !DECL_DECLARED_INLINE_P (decl)
484 && !(DECL_CONTEXT (decl)
485 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
486 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
487 node->force_output = 1;
488
489 /* If we've not yet emitted decl, tell the debug info about it. */
490 if (!TREE_ASM_WRITTEN (decl))
491 (*debug_hooks->deferred_inline_function) (decl);
492
493 if (!no_collect)
494 ggc_collect ();
495
496 if (symtab->state == CONSTRUCTION
497 && (node->needed_p () || node->referred_to_p ()))
498 enqueue_node (node);
499 }
500
501 /* Add the function FNDECL to the call graph.
502 Unlike finalize_function, this function is intended to be used
503 by the middle end and allows insertion of a new function at an arbitrary
504 point of compilation. The function can be in high, low or SSA GIMPLE
505 form.
506
507 The function is assumed to be reachable and to have its address taken (so
508 no API-breaking optimizations are performed on it).
509
510 The main work done by this function is to enqueue the function for later
511 processing, avoiding the need for the passes to be re-entrant. */
512
513 void
514 cgraph_node::add_new_function (tree fndecl, bool lowered)
515 {
516 gcc::pass_manager *passes = g->get_passes ();
517 cgraph_node *node;
518
519 if (dump_file)
520 {
521 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
522 const char *function_type = ((gimple_has_body_p (fndecl))
523 ? (lowered
524 ? (gimple_in_ssa_p (fn)
525 ? "ssa gimple"
526 : "low gimple")
527 : "high gimple")
528 : "to-be-gimplified");
529 fprintf (dump_file,
530 "Added new %s function %s to callgraph\n",
531 function_type,
532 fndecl_name (fndecl));
533 }
534
535 switch (symtab->state)
536 {
537 case PARSING:
538 cgraph_node::finalize_function (fndecl, false);
539 break;
540 case CONSTRUCTION:
541 /* Just enqueue the function to be processed at the nearest occasion. */
542 node = cgraph_node::get_create (fndecl);
543 if (lowered)
544 node->lowered = true;
545 cgraph_new_nodes.safe_push (node);
546 break;
547
548 case IPA:
549 case IPA_SSA:
550 case IPA_SSA_AFTER_INLINING:
551 case EXPANSION:
552 /* Bring the function into finalized state and enqueue it for later
553 analysis and compilation. */
554 node = cgraph_node::get_create (fndecl);
555 node->local = false;
556 node->definition = true;
557 node->force_output = true;
558 if (TREE_PUBLIC (fndecl))
559 node->externally_visible = true;
560 if (!lowered && symtab->state == EXPANSION)
561 {
562 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
563 gimple_register_cfg_hooks ();
564 bitmap_obstack_initialize (NULL);
565 execute_pass_list (cfun, passes->all_lowering_passes);
566 passes->execute_early_local_passes ();
567 bitmap_obstack_release (NULL);
568 pop_cfun ();
569
570 lowered = true;
571 }
572 if (lowered)
573 node->lowered = true;
574 cgraph_new_nodes.safe_push (node);
575 break;
576
577 case FINISHED:
578 /* At the very end of compilation we have to do all the work up
579 to expansion. */
580 node = cgraph_node::create (fndecl);
581 if (lowered)
582 node->lowered = true;
583 node->definition = true;
584 node->analyze ();
585 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
586 gimple_register_cfg_hooks ();
587 bitmap_obstack_initialize (NULL);
588 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
589 g->get_passes ()->execute_early_local_passes ();
590 bitmap_obstack_release (NULL);
591 pop_cfun ();
592 node->expand ();
593 break;
594
595 default:
596 gcc_unreachable ();
597 }
598
599 /* Set a personality if required and we already passed EH lowering. */
600 if (lowered
601 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
602 == eh_personality_lang))
603 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
604 }
605
606 /* Analyze the function scheduled to be output. */
607 void
608 cgraph_node::analyze (void)
609 {
610 if (native_rtl_p ())
611 {
612 analyzed = true;
613 return;
614 }
615
616 tree decl = this->decl;
617 location_t saved_loc = input_location;
618 input_location = DECL_SOURCE_LOCATION (decl);
619
620 if (thunk.thunk_p)
621 {
622 cgraph_node *t = cgraph_node::get (thunk.alias);
623
624 create_edge (t, NULL, t->count);
625 callees->can_throw_external = !TREE_NOTHROW (t->decl);
626 /* Target code in expand_thunk may need the thunk's target
627 to be analyzed, so recurse here. */
628 if (!t->analyzed && t->definition)
629 t->analyze ();
630 if (t->alias)
631 {
632 t = t->get_alias_target ();
633 if (!t->analyzed && t->definition)
634 t->analyze ();
635 }
636 bool ret = expand_thunk (false, false);
637 thunk.alias = NULL;
638 if (!ret)
639 return;
640 }
641 if (alias)
642 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
643 else if (dispatcher_function)
644 {
645 /* Generate the dispatcher body of multi-versioned functions. */
646 cgraph_function_version_info *dispatcher_version_info
647 = function_version ();
648 if (dispatcher_version_info != NULL
649 && (dispatcher_version_info->dispatcher_resolver
650 == NULL_TREE))
651 {
652 tree resolver = NULL_TREE;
653 gcc_assert (targetm.generate_version_dispatcher_body);
654 resolver = targetm.generate_version_dispatcher_body (this);
655 gcc_assert (resolver != NULL_TREE);
656 }
657 }
658 else
659 {
660 push_cfun (DECL_STRUCT_FUNCTION (decl));
661
662 assign_assembler_name_if_needed (decl);
663
664 /* Make sure to gimplify bodies only once. While analyzing a
665 function we lower it, which will require gimplified nested
666 functions, so we can end up here with an already gimplified
667 body. */
668 if (!gimple_has_body_p (decl))
669 gimplify_function_tree (decl);
670
671 /* Lower the function. */
672 if (!lowered)
673 {
674 if (nested)
675 lower_nested_functions (decl);
676 gcc_assert (!nested);
677
678 gimple_register_cfg_hooks ();
679 bitmap_obstack_initialize (NULL);
680 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
681 free_dominance_info (CDI_POST_DOMINATORS);
682 free_dominance_info (CDI_DOMINATORS);
683 compact_blocks ();
684 bitmap_obstack_release (NULL);
685 lowered = true;
686 }
687
688 pop_cfun ();
689 }
690 analyzed = true;
691
692 input_location = saved_loc;
693 }
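/* An illustrative C++ example (not part of this file, assuming an x86
   target) of a multi-versioned function whose dispatcher body is generated
   in the dispatcher_function branch above:

     __attribute__ ((target ("default")))
     int fast_popcount (unsigned x)
     { int n = 0; for (; x; x &= x - 1) n++; return n; }

     __attribute__ ((target ("popcnt")))
     int fast_popcount (unsigned x) { return __builtin_popcount (x); }

     int use (unsigned x) { return fast_popcount (x); }  // goes via dispatcher

   The call through the dispatcher is resolved at load time by the resolver
   produced via targetm.generate_version_dispatcher_body.  */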
694
695 /* The C++ frontend produces same-body aliases all over the place, even before
696 the PCH gets streamed out. It relies on us linking the aliases with their
697 functions in order to do the fixups, but ipa-ref is not PCH safe.
698 Consequently we first produce aliases without links, and once the C++ FE is
699 sure it won't stream PCH we build the links via this function. */
700
701 void
702 symbol_table::process_same_body_aliases (void)
703 {
704 symtab_node *node;
705 FOR_EACH_SYMBOL (node)
706 if (node->cpp_implicit_alias && !node->analyzed)
707 node->resolve_alias
708 (VAR_P (node->alias_target)
709 ? (symtab_node *)varpool_node::get_create (node->alias_target)
710 : (symtab_node *)cgraph_node::get_create (node->alias_target));
711 cpp_implicit_aliases_done = true;
712 }
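/* An illustrative example (not part of this file): for a C++ constructor
   such as

     struct S { S () { } };

   the C++ front end emits the complete-object constructor (_ZN1SC1Ev) as a
   same-body alias of the base-object constructor (_ZN1SC2Ev).  The link
   between the two symtab nodes is only built here, once it is known that
   no PCH will be streamed.  */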
713
714 /* Process a symver attribute. */
715
716 static void
717 process_symver_attribute (symtab_node *n)
718 {
719 tree value = lookup_attribute ("symver", DECL_ATTRIBUTES (n->decl));
720
721 if (!value)
722 return;
723 if (lookup_attribute ("symver", TREE_CHAIN (value)))
724 {
725 error_at (DECL_SOURCE_LOCATION (n->decl),
726 "multiple versions for one symbol");
727 return;
728 }
729 tree symver = get_identifier_with_length
730 (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (value))),
731 TREE_STRING_LENGTH (TREE_VALUE (TREE_VALUE (value))));
732 symtab_node *def = symtab_node::get_for_asmname (symver);
733
734 if (def)
735 {
736 error_at (DECL_SOURCE_LOCATION (n->decl),
737 "duplicate definition of a symbol version");
738 inform (DECL_SOURCE_LOCATION (def->decl),
739 "same version was previously defined here");
740 return;
741 }
742 if (!n->definition)
743 {
744 error_at (DECL_SOURCE_LOCATION (n->decl),
745 "symbol needs to be defined to have a version");
746 return;
747 }
748 if (DECL_COMMON (n->decl))
749 {
750 error_at (DECL_SOURCE_LOCATION (n->decl),
751 "common symbol cannot be versioned");
752 return;
753 }
754 if (DECL_COMDAT (n->decl))
755 {
756 error_at (DECL_SOURCE_LOCATION (n->decl),
757 "comdat symbol cannot be versioned");
758 return;
759 }
760 if (n->weakref)
761 {
762 error_at (DECL_SOURCE_LOCATION (n->decl),
763 "weakref cannot be versioned");
764 return;
765 }
766 if (!TREE_PUBLIC (n->decl))
767 {
768 error_at (DECL_SOURCE_LOCATION (n->decl),
769 "versioned symbol must be public");
770 return;
771 }
772 if (DECL_VISIBILITY (n->decl) != VISIBILITY_DEFAULT)
773 {
774 error_at (DECL_SOURCE_LOCATION (n->decl),
775 "versioned symbol must have default visibility");
776 return;
777 }
778
779 /* Create new symbol table entry representing the version. */
780 tree new_decl = copy_node (n->decl);
781
782 DECL_INITIAL (new_decl) = NULL_TREE;
783 if (TREE_CODE (new_decl) == FUNCTION_DECL)
784 DECL_STRUCT_FUNCTION (new_decl) = NULL;
785 SET_DECL_ASSEMBLER_NAME (new_decl, symver);
786 TREE_PUBLIC (new_decl) = 1;
787 DECL_ATTRIBUTES (new_decl) = NULL;
788
789 symtab_node *symver_node = symtab_node::get_create (new_decl);
790 symver_node->alias = true;
791 symver_node->definition = true;
792 symver_node->symver = true;
793 symver_node->create_reference (n, IPA_REF_ALIAS, NULL);
794 symver_node->analyzed = true;
795 }
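/* An illustrative example (not part of this file) of the attribute handled
   above:

     __attribute__ ((symver ("foo@VERS_1")))
     int foo_v1 (void)
     {
       return 1;
     }

   This creates an extra symtab node for the assembler name "foo@VERS_1",
   marked with the symver flag and aliased to foo_v1, similar to what a
   ".symver" directive or a linker version script would achieve.  */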
796
797 /* Process attributes common to variables and functions. */
798
799 static void
800 process_common_attributes (symtab_node *node, tree decl)
801 {
802 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
803
804 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
805 {
806 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
807 "%<weakref%> attribute should be accompanied with"
808 " an %<alias%> attribute");
809 DECL_WEAK (decl) = 0;
810 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
811 DECL_ATTRIBUTES (decl));
812 }
813
814 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
815 node->no_reorder = 1;
816 process_symver_attribute (node);
817 }
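/* Illustrative examples (not part of this file; names are made up) of the
   weakref usage checked above:

     static void do_log (void) __attribute__ ((weakref ("log_impl")));

   is fine: weakref with an argument is shorthand for weakref plus alias,
   and creates a weak reference to log_impl.  By contrast

     static void do_trace (void) __attribute__ ((weakref));

   has no alias target, so it triggers the warning above and the weakref
   attribute (and DECL_WEAK) is dropped.  */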
818
819 /* Look for externally_visible and used attributes and mark cgraph nodes
820 accordingly.
821
822 We cannot mark the nodes at the point the attributes are processed (in
823 handle_*_attribute) because the copy of the declarations available at that
824 point may not be canonical. For example, in:
825
826 void f();
827 void f() __attribute__((used));
828
829 the declaration we see in handle_used_attribute will be the second
830 declaration -- but the front end will subsequently merge that declaration
831 with the original declaration and discard the second declaration.
832
833 Furthermore, we can't mark these nodes in finalize_function because:
834
835 void f() {}
836 void f() __attribute__((externally_visible));
837
838 is valid.
839
840 So, we walk the nodes at the end of the translation unit, applying the
841 attributes at that point. */
842
843 static void
844 process_function_and_variable_attributes (cgraph_node *first,
845 varpool_node *first_var)
846 {
847 cgraph_node *node;
848 varpool_node *vnode;
849
850 for (node = symtab->first_function (); node != first;
851 node = symtab->next_function (node))
852 {
853 tree decl = node->decl;
854 if (DECL_PRESERVE_P (decl))
855 node->mark_force_output ();
856 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
857 {
858 if (! TREE_PUBLIC (node->decl))
859 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
860 "%<externally_visible%>"
861 " attribute have effect only on public objects");
862 }
863 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
864 && (node->definition && !node->alias))
865 {
866 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
867 "%<weakref%> attribute ignored"
868 " because function is defined");
869 DECL_WEAK (decl) = 0;
870 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
871 DECL_ATTRIBUTES (decl));
872 }
873 else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
874 && node->definition
875 && !node->alias)
876 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
877 "%<alias%> attribute ignored"
878 " because function is defined");
879
880 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
881 && !DECL_DECLARED_INLINE_P (decl)
882 /* redefining extern inline function makes it DECL_UNINLINABLE. */
883 && !DECL_UNINLINABLE (decl))
884 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
885 "%<always_inline%> function might not be inlinable");
886
887 process_common_attributes (node, decl);
888 }
889 for (vnode = symtab->first_variable (); vnode != first_var;
890 vnode = symtab->next_variable (vnode))
891 {
892 tree decl = vnode->decl;
893 if (DECL_EXTERNAL (decl)
894 && DECL_INITIAL (decl))
895 varpool_node::finalize_decl (decl);
896 if (DECL_PRESERVE_P (decl))
897 vnode->force_output = true;
898 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
899 {
900 if (! TREE_PUBLIC (vnode->decl))
901 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
902 "%<externally_visible%>"
903 " attribute have effect only on public objects");
904 }
905 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
906 && vnode->definition
907 && DECL_INITIAL (decl))
908 {
909 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
910 "%<weakref%> attribute ignored"
911 " because variable is initialized");
912 DECL_WEAK (decl) = 0;
913 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
914 DECL_ATTRIBUTES (decl));
915 }
916 process_common_attributes (vnode, decl);
917 }
918 }
919
920 /* Mark DECL as finalized. By finalizing the declaration, the frontend
921 instructs the middle end to output the variable to the asm file if it is
922 needed or externally visible. */
923
924 void
925 varpool_node::finalize_decl (tree decl)
926 {
927 varpool_node *node = varpool_node::get_create (decl);
928
929 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
930
931 if (node->definition)
932 return;
933 /* Set definition first before calling notice_global_symbol so that
934 it is available to notice_global_symbol. */
935 node->definition = true;
936 notice_global_symbol (decl);
937 if (!flag_toplevel_reorder)
938 node->no_reorder = true;
939 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
940 /* Traditionally we do not eliminate static variables when not
941 optimizing and when not doing toplevel reorder. */
942 || (node->no_reorder && !DECL_COMDAT (node->decl)
943 && !DECL_ARTIFICIAL (node->decl)))
944 node->force_output = true;
945
946 if (symtab->state == CONSTRUCTION
947 && (node->needed_p () || node->referred_to_p ()))
948 enqueue_node (node);
949 if (symtab->state >= IPA_SSA)
950 node->analyze ();
951 /* Some frontends produce various interface variables after compilation
952 has finished. */
953 if (symtab->state == FINISHED
954 || (node->no_reorder
955 && symtab->state == EXPANSION))
956 node->assemble_decl ();
957 }
958
959 /* EDGE is a polymorphic call. Mark all possible targets as reachable,
960 and if there is only one target, perform trivial devirtualization.
961 REACHABLE_CALL_TARGETS collects target lists we have already walked to
962 avoid duplicate work. */
963
964 static void
965 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
966 cgraph_edge *edge)
967 {
968 unsigned int i;
969 void *cache_token;
970 bool final;
971 vec <cgraph_node *>targets
972 = possible_polymorphic_call_targets
973 (edge, &final, &cache_token);
974
975 if (!reachable_call_targets->add (cache_token))
976 {
977 if (symtab->dump_file)
978 dump_possible_polymorphic_call_targets
979 (symtab->dump_file, edge);
980
981 for (i = 0; i < targets.length (); i++)
982 {
983 /* Do not bother to mark virtual methods in an anonymous namespace;
984 either we will find a use of the virtual table defining it, or it is
985 unused. */
986 if (targets[i]->definition
987 && TREE_CODE
988 (TREE_TYPE (targets[i]->decl))
989 == METHOD_TYPE
990 && !type_in_anonymous_namespace_p
991 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
992 enqueue_node (targets[i]);
993 }
994 }
995
996 /* Very trivial devirtualization; when the type is
997 final or anonymous (so we know all its derived types)
998 and there is only one possible virtual call target,
999 make the edge direct. */
1000 if (final)
1001 {
1002 if (targets.length () <= 1 && dbg_cnt (devirt))
1003 {
1004 cgraph_node *target;
1005 if (targets.length () == 1)
1006 target = targets[0];
1007 else
1008 target = cgraph_node::create
1009 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
1010
1011 if (symtab->dump_file)
1012 {
1013 fprintf (symtab->dump_file,
1014 "Devirtualizing call: ");
1015 print_gimple_stmt (symtab->dump_file,
1016 edge->call_stmt, 0,
1017 TDF_SLIM);
1018 }
1019 if (dump_enabled_p ())
1020 {
1021 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
1022 "devirtualizing call in %s to %s\n",
1023 edge->caller->name (), target->name ());
1024 }
1025
1026 edge->make_direct (target);
1027 edge->redirect_call_stmt_to_callee ();
1028
1029 if (symtab->dump_file)
1030 {
1031 fprintf (symtab->dump_file,
1032 "Devirtualized as: ");
1033 print_gimple_stmt (symtab->dump_file,
1034 edge->call_stmt, 0,
1035 TDF_SLIM);
1036 }
1037 }
1038 }
1039 }
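/* An illustrative C++ example (not part of this file) of the trivial
   devirtualization performed above:

     struct base { virtual int f () { return 0; } };
     struct derived final : base { int f () { return 1; } };

     int
     call (derived *d)
     {
       return d->f ();  // only possible target is derived::f
     }

   Because the static type is final, the target list has exactly one entry
   and the indirect call edge is turned into a direct call to derived::f
   (or to __builtin_unreachable when the target list is empty).  */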
1040
1041 /* Issue appropriate warnings for the global declaration represented by SNODE. */
1042
1043 static void
1044 check_global_declaration (symtab_node *snode)
1045 {
1046 const char *decl_file;
1047 tree decl = snode->decl;
1048
1049 /* Warn about any function declared static but not defined. We don't
1050 warn about variables, because many programs have static variables
1051 that exist only to get some text into the object file. */
1052 if (TREE_CODE (decl) == FUNCTION_DECL
1053 && DECL_INITIAL (decl) == 0
1054 && DECL_EXTERNAL (decl)
1055 && ! DECL_ARTIFICIAL (decl)
1056 && ! TREE_NO_WARNING (decl)
1057 && ! TREE_PUBLIC (decl)
1058 && (warn_unused_function
1059 || snode->referred_to_p (/*include_self=*/false)))
1060 {
1061 if (snode->referred_to_p (/*include_self=*/false))
1062 pedwarn (input_location, 0, "%q+F used but never defined", decl);
1063 else
1064 warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
1065 /* This symbol is effectively an "extern" declaration now. */
1066 TREE_PUBLIC (decl) = 1;
1067 }
1068
1069 /* Warn about static fns or vars defined but not used. */
1070 if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
1071 || (((warn_unused_variable && ! TREE_READONLY (decl))
1072 || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
1073 && (warn_unused_const_variable == 2
1074 || (main_input_filename != NULL
1075 && (decl_file = DECL_SOURCE_FILE (decl)) != NULL
1076 && filename_cmp (main_input_filename,
1077 decl_file) == 0))))
1078 && VAR_P (decl)))
1079 && ! DECL_IN_SYSTEM_HEADER (decl)
1080 && ! snode->referred_to_p (/*include_self=*/false)
1081 /* This TREE_USED check is needed in addition to referred_to_p
1082 above, because the `__unused__' attribute is not being
1083 considered for referred_to_p. */
1084 && ! TREE_USED (decl)
1085 /* The TREE_USED bit for file-scope decls is kept in the identifier,
1086 to handle multiple external decls in different scopes. */
1087 && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1088 && ! DECL_EXTERNAL (decl)
1089 && ! DECL_ARTIFICIAL (decl)
1090 && ! DECL_ABSTRACT_ORIGIN (decl)
1091 && ! TREE_PUBLIC (decl)
1092 /* A volatile variable might be used in some non-obvious way. */
1093 && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1094 /* Global register variables must be declared to reserve them. */
1095 && ! (VAR_P (decl) && DECL_REGISTER (decl))
1096 /* Global ctors and dtors are called by the runtime. */
1097 && (TREE_CODE (decl) != FUNCTION_DECL
1098 || (!DECL_STATIC_CONSTRUCTOR (decl)
1099 && !DECL_STATIC_DESTRUCTOR (decl)))
1100 /* Otherwise, ask the language. */
1101 && lang_hooks.decls.warn_unused_global (decl))
1102 warning_at (DECL_SOURCE_LOCATION (decl),
1103 (TREE_CODE (decl) == FUNCTION_DECL)
1104 ? OPT_Wunused_function
1105 : (TREE_READONLY (decl)
1106 ? OPT_Wunused_const_variable_
1107 : OPT_Wunused_variable),
1108 "%qD defined but not used", decl);
1109 }
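/* Illustrative examples (not part of this file; names are made up) of what
   the checks above diagnose:

     static int helper (void);      // never defined: "used but never defined"
                                    // if called, -Wunused-function otherwise
     static int counter;            // defined but unused: -Wunused-variable
     static const int limit = 42;   // unused: -Wunused-const-variable
*/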
1110
1111 /* Discover all functions and variables that are trivially needed, and analyze
1112 them as well as all functions and variables referred to by them. */
1113 static cgraph_node *first_analyzed;
1114 static varpool_node *first_analyzed_var;
1115
1116 /* FIRST_TIME is set to TRUE the first time we are called for a
1117 translation unit from finalize_compilation_unit (), and to FALSE
1118 otherwise. */
1119
1120 static void
1121 analyze_functions (bool first_time)
1122 {
1123 /* Keep track of already processed nodes when called multiple times for
1124 intermodule optimization. */
1125 cgraph_node *first_handled = first_analyzed;
1126 varpool_node *first_handled_var = first_analyzed_var;
1127 hash_set<void *> reachable_call_targets;
1128
1129 symtab_node *node;
1130 symtab_node *next;
1131 int i;
1132 ipa_ref *ref;
1133 bool changed = true;
1134 location_t saved_loc = input_location;
1135
1136 bitmap_obstack_initialize (NULL);
1137 symtab->state = CONSTRUCTION;
1138 input_location = UNKNOWN_LOCATION;
1139
1140 /* Ugly, but the fixup cannot happen at the time the same-body alias is
1141 created; at that point the C++ FE does not yet have the COMDAT groups right. */
1142 if (symtab->cpp_implicit_aliases_done)
1143 FOR_EACH_SYMBOL (node)
1144 if (node->cpp_implicit_alias)
1145 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1146 build_type_inheritance_graph ();
1147
1148 /* Analysis adds static variables that in turn add references to new functions,
1149 so we need to iterate the process until it stabilizes. */
1150 while (changed)
1151 {
1152 changed = false;
1153 process_function_and_variable_attributes (first_analyzed,
1154 first_analyzed_var);
1155
1156 /* First identify the trivially needed symbols. */
1157 for (node = symtab->first_symbol ();
1158 node != first_analyzed
1159 && node != first_analyzed_var; node = node->next)
1160 {
1161 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1162 node->get_comdat_group_id ();
1163 if (node->needed_p ())
1164 {
1165 enqueue_node (node);
1166 if (!changed && symtab->dump_file)
1167 fprintf (symtab->dump_file, "Trivially needed symbols:");
1168 changed = true;
1169 if (symtab->dump_file)
1170 fprintf (symtab->dump_file, " %s", node->asm_name ());
1171 if (!changed && symtab->dump_file)
1172 fprintf (symtab->dump_file, "\n");
1173 }
1174 if (node == first_analyzed
1175 || node == first_analyzed_var)
1176 break;
1177 }
1178 symtab->process_new_functions ();
1179 first_analyzed_var = symtab->first_variable ();
1180 first_analyzed = symtab->first_function ();
1181
1182 if (changed && symtab->dump_file)
1183 fprintf (symtab->dump_file, "\n");
1184
1185 /* Lower the representation, and build callgraph edges and references for all
1186 trivially needed symbols and all symbols referred to by them. */
1187 while (queued_nodes != &symtab_terminator)
1188 {
1189 changed = true;
1190 node = queued_nodes;
1191 queued_nodes = (symtab_node *)queued_nodes->aux;
1192 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1193 if (cnode && cnode->definition)
1194 {
1195 cgraph_edge *edge;
1196 tree decl = cnode->decl;
1197
1198 /* ??? It is possible to create an extern inline function
1199 and later use the weak alias attribute to kill its body.
1200 See gcc.c-torture/compile/20011119-1.c. */
1201 if (!DECL_STRUCT_FUNCTION (decl)
1202 && !cnode->alias
1203 && !cnode->thunk.thunk_p
1204 && !cnode->dispatcher_function)
1205 {
1206 cnode->reset ();
1207 cnode->redefined_extern_inline = true;
1208 continue;
1209 }
1210
1211 if (!cnode->analyzed)
1212 cnode->analyze ();
1213
1214 for (edge = cnode->callees; edge; edge = edge->next_callee)
1215 if (edge->callee->definition
1216 && (!DECL_EXTERNAL (edge->callee->decl)
1217 /* When not optimizing, do not try to analyze extern
1218 inline functions. Doing so is pointless. */
1219 || opt_for_fn (edge->callee->decl, optimize)
1220 /* Weakrefs need to be preserved. */
1221 || edge->callee->alias
1222 /* always_inline functions are inlined even at -O0. */
1223 || lookup_attribute
1224 ("always_inline",
1225 DECL_ATTRIBUTES (edge->callee->decl))
1226 /* Multiversioned functions need the dispatcher to
1227 be produced locally even for extern functions. */
1228 || edge->callee->function_version ()))
1229 enqueue_node (edge->callee);
1230 if (opt_for_fn (cnode->decl, optimize)
1231 && opt_for_fn (cnode->decl, flag_devirtualize))
1232 {
1233 cgraph_edge *next;
1234
1235 for (edge = cnode->indirect_calls; edge; edge = next)
1236 {
1237 next = edge->next_callee;
1238 if (edge->indirect_info->polymorphic)
1239 walk_polymorphic_call_targets (&reachable_call_targets,
1240 edge);
1241 }
1242 }
1243
1244 /* If decl is a clone of an abstract function,
1245 mark that abstract function so that we don't release its body.
1246 The DECL_INITIAL() of that abstract function declaration
1247 will be later needed to output debug info. */
1248 if (DECL_ABSTRACT_ORIGIN (decl))
1249 {
1250 cgraph_node *origin_node
1251 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1252 origin_node->used_as_abstract_origin = true;
1253 }
1254 /* Preserve a function's context node. It will
1255 later be needed to output debug info. */
1256 if (tree fn = decl_function_context (decl))
1257 {
1258 cgraph_node *origin_node = cgraph_node::get_create (fn);
1259 enqueue_node (origin_node);
1260 }
1261 }
1262 else
1263 {
1264 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1265 if (vnode && vnode->definition && !vnode->analyzed)
1266 vnode->analyze ();
1267 }
1268
1269 if (node->same_comdat_group)
1270 {
1271 symtab_node *next;
1272 for (next = node->same_comdat_group;
1273 next != node;
1274 next = next->same_comdat_group)
1275 if (!next->comdat_local_p ())
1276 enqueue_node (next);
1277 }
1278 for (i = 0; node->iterate_reference (i, ref); i++)
1279 if (ref->referred->definition
1280 && (!DECL_EXTERNAL (ref->referred->decl)
1281 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1282 && optimize)
1283 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1284 && opt_for_fn (ref->referred->decl, optimize))
1285 || node->alias
1286 || ref->referred->alias)))
1287 enqueue_node (ref->referred);
1288 symtab->process_new_functions ();
1289 }
1290 }
1291 update_type_inheritance_graph ();
1292
1293 /* Collect entry points to the unit. */
1294 if (symtab->dump_file)
1295 {
1296 fprintf (symtab->dump_file, "\n\nInitial ");
1297 symtab->dump (symtab->dump_file);
1298 }
1299
1300 if (first_time)
1301 {
1302 symtab_node *snode;
1303 FOR_EACH_SYMBOL (snode)
1304 check_global_declaration (snode);
1305 }
1306
1307 if (symtab->dump_file)
1308 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1309
1310 for (node = symtab->first_symbol ();
1311 node != first_handled
1312 && node != first_handled_var; node = next)
1313 {
1314 next = node->next;
1315 /* For symbols declared locally we clear TREE_READONLY when emitting
1316 the constructor (if one is needed). For external declarations we can
1317 not safely assume that the type is readonly because we may be called
1318 during its construction. */
1319 if (TREE_CODE (node->decl) == VAR_DECL
1320 && TYPE_P (TREE_TYPE (node->decl))
1321 && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
1322 && DECL_EXTERNAL (node->decl))
1323 TREE_READONLY (node->decl) = 0;
1324 if (!node->aux && !node->referred_to_p ())
1325 {
1326 if (symtab->dump_file)
1327 fprintf (symtab->dump_file, " %s", node->name ());
1328
1329 /* See if the debugger can use anything before the DECL
1330 passes away. Perhaps it can notice a DECL that is now a
1331 constant and can tag the early DIE with an appropriate
1332 attribute.
1333
1334 Otherwise, this is the last chance the debug_hooks have
1335 at looking at optimized away DECLs, since
1336 late_global_decl will subsequently be called from the
1337 contents of the now pruned symbol table. */
1338 if (VAR_P (node->decl)
1339 && !decl_function_context (node->decl))
1340 {
1341 /* We are reclaiming totally unreachable code and variables
1342 so they effectively appear as readonly. Show that to
1343 the debug machinery. */
1344 TREE_READONLY (node->decl) = 1;
1345 node->definition = false;
1346 (*debug_hooks->late_global_decl) (node->decl);
1347 }
1348
1349 node->remove ();
1350 continue;
1351 }
1352 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1353 {
1354 tree decl = node->decl;
1355
1356 if (cnode->definition && !gimple_has_body_p (decl)
1357 && !cnode->alias
1358 && !cnode->thunk.thunk_p)
1359 cnode->reset ();
1360
1361 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1362 || cnode->alias
1363 || gimple_has_body_p (decl)
1364 || cnode->native_rtl_p ());
1365 gcc_assert (cnode->analyzed == cnode->definition);
1366 }
1367 node->aux = NULL;
1368 }
1369 for (;node; node = node->next)
1370 node->aux = NULL;
1371 first_analyzed = symtab->first_function ();
1372 first_analyzed_var = symtab->first_variable ();
1373 if (symtab->dump_file)
1374 {
1375 fprintf (symtab->dump_file, "\n\nReclaimed ");
1376 symtab->dump (symtab->dump_file);
1377 }
1378 bitmap_obstack_release (NULL);
1379 ggc_collect ();
1380 /* Initialize the assembler name hash; in particular we want to trigger C++
1381 mangling and same-body alias creation before we free the DECL_ARGUMENTS
1382 used by it. */
1383 if (!seen_error ())
1384 symtab->symtab_initialize_asm_name_hash ();
1385
1386 input_location = saved_loc;
1387 }
1388
1389 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1390 (which may be an ifunc resolver) and issue a diagnostic when they are
1391 not compatible according to language rules (plus a C++ extension for
1392 non-static member functions). */
1393
1394 static void
1395 maybe_diag_incompatible_alias (tree alias, tree target)
1396 {
1397 tree altype = TREE_TYPE (alias);
1398 tree targtype = TREE_TYPE (target);
1399
1400 bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1401 tree funcptr = altype;
1402
1403 if (ifunc)
1404 {
1405 /* Handle attribute ifunc first. */
1406 if (TREE_CODE (altype) == METHOD_TYPE)
1407 {
1408 /* Set FUNCPTR to the type of the alias target. If the type
1409 is a non-static member function of class C, construct a type
1410 of an ordinary function taking C* as the first argument,
1411 followed by the member function argument list, and use it
1412 instead to check for incompatibility. This conversion is
1413 not defined by the language but an extension provided by
1414 G++. */
1415
1416 tree rettype = TREE_TYPE (altype);
1417 tree args = TYPE_ARG_TYPES (altype);
1418 altype = build_function_type (rettype, args);
1419 funcptr = altype;
1420 }
1421
1422 targtype = TREE_TYPE (targtype);
1423
1424 if (POINTER_TYPE_P (targtype))
1425 {
1426 targtype = TREE_TYPE (targtype);
1427
1428 /* Only issue Wattribute-alias for conversions to void* with
1429 -Wextra. */
1430 if (VOID_TYPE_P (targtype) && !extra_warnings)
1431 return;
1432
1433 /* Proceed to handle incompatible ifunc resolvers below. */
1434 }
1435 else
1436 {
1437 funcptr = build_pointer_type (funcptr);
1438
1439 error_at (DECL_SOURCE_LOCATION (target),
1440 "%<ifunc%> resolver for %qD must return %qT",
1441 alias, funcptr);
1442 inform (DECL_SOURCE_LOCATION (alias),
1443 "resolver indirect function declared here");
1444 return;
1445 }
1446 }
1447
1448 if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1449 || (prototype_p (altype)
1450 && prototype_p (targtype)
1451 && !types_compatible_p (altype, targtype))))
1452 {
1453 /* Warn for incompatibilities. Avoid warning for functions
1454 without a prototype to make it possible to declare aliases
1455 without knowing the exact type, as libstdc++ does. */
1456 if (ifunc)
1457 {
1458 funcptr = build_pointer_type (funcptr);
1459
1460 auto_diagnostic_group d;
1461 if (warning_at (DECL_SOURCE_LOCATION (target),
1462 OPT_Wattribute_alias_,
1463 "%<ifunc%> resolver for %qD should return %qT",
1464 alias, funcptr))
1465 inform (DECL_SOURCE_LOCATION (alias),
1466 "resolver indirect function declared here");
1467 }
1468 else
1469 {
1470 auto_diagnostic_group d;
1471 if (warning_at (DECL_SOURCE_LOCATION (alias),
1472 OPT_Wattribute_alias_,
1473 "%qD alias between functions of incompatible "
1474 "types %qT and %qT", alias, altype, targtype))
1475 inform (DECL_SOURCE_LOCATION (target),
1476 "aliased declaration here");
1477 }
1478 }
1479 }
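/* An illustrative example (not part of this file; names are made up) of an
   ifunc alias the check above accepts, because the resolver returns a
   pointer to a function type compatible with the alias:

     static int
     my_popcount_generic (unsigned x)
     {
       int n = 0;
       for (; x; x &= x - 1)
         n++;
       return n;
     }

     static int (*resolve_my_popcount (void)) (unsigned)
     {
       return my_popcount_generic;  // choose an implementation at load time
     }

     int my_popcount (unsigned) __attribute__ ((ifunc ("resolve_my_popcount")));

   Had the resolver been declared to return void * instead, the code above
   would only diagnose it with -Wextra (-Wattribute-alias).  */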
1480
1481 /* Translate the ugly representation of aliases as alias pairs into the nice
1482 representation in the callgraph. We don't handle all cases yet,
1483 unfortunately. */
1484
1485 static void
1486 handle_alias_pairs (void)
1487 {
1488 alias_pair *p;
1489 unsigned i;
1490
1491 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1492 {
1493 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1494
1495 /* Weakrefs whose target is not defined in the current unit are easy to handle:
1496 they behave just like external variables, except that we need to note the
1497 alias flag to later output the weakref pseudo-op into the asm file. */
1498 if (!target_node
1499 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1500 {
1501 symtab_node *node = symtab_node::get (p->decl);
1502 if (node)
1503 {
1504 node->alias_target = p->target;
1505 node->weakref = true;
1506 node->alias = true;
1507 node->transparent_alias = true;
1508 }
1509 alias_pairs->unordered_remove (i);
1510 continue;
1511 }
1512 else if (!target_node)
1513 {
1514 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1515 symtab_node *node = symtab_node::get (p->decl);
1516 if (node)
1517 node->alias = false;
1518 alias_pairs->unordered_remove (i);
1519 continue;
1520 }
1521
1522 if (DECL_EXTERNAL (target_node->decl)
1523 /* We use local aliases for C++ thunks to force the tailcall
1524 to bind locally. This is a hack - to keep it working do
1525 the following (which is not strictly correct). */
1526 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1527 || ! DECL_VIRTUAL_P (target_node->decl))
1528 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1529 {
1530 error ("%q+D aliased to external symbol %qE",
1531 p->decl, p->target);
1532 }
1533
1534 if (TREE_CODE (p->decl) == FUNCTION_DECL
1535 && target_node && is_a <cgraph_node *> (target_node))
1536 {
1537 maybe_diag_incompatible_alias (p->decl, target_node->decl);
1538
1539 maybe_diag_alias_attributes (p->decl, target_node->decl);
1540
1541 cgraph_node *src_node = cgraph_node::get (p->decl);
1542 if (src_node && src_node->definition)
1543 src_node->reset ();
1544 cgraph_node::create_alias (p->decl, target_node->decl);
1545 alias_pairs->unordered_remove (i);
1546 }
1547 else if (VAR_P (p->decl)
1548 && target_node && is_a <varpool_node *> (target_node))
1549 {
1550 varpool_node::create_alias (p->decl, target_node->decl);
1551 alias_pairs->unordered_remove (i);
1552 }
1553 else
1554 {
1555 error ("%q+D alias between function and variable is not supported",
1556 p->decl);
1557 inform (DECL_SOURCE_LOCATION (target_node->decl),
1558 "aliased declaration here");
1559
1560 alias_pairs->unordered_remove (i);
1561 }
1562 }
1563 vec_free (alias_pairs);
1564 }
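/* An illustrative example (not part of this file; names are made up) of the
   alias pairs handled above, as written at the user level:

     int impl (void) { return 0; }
     int api (void) __attribute__ ((alias ("impl")));

   becomes a cgraph alias of impl, while

     extern int broken (void) __attribute__ ((alias ("no_such_symbol")));

   is diagnosed above as aliased to an undefined symbol (it is neither
   defined in this unit nor a weakref).  */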
1565
1566
1567 /* Figure out what functions we want to assemble. */
1568
1569 static void
1570 mark_functions_to_output (void)
1571 {
1572 bool check_same_comdat_groups = false;
1573 cgraph_node *node;
1574
1575 if (flag_checking)
1576 FOR_EACH_FUNCTION (node)
1577 gcc_assert (!node->process);
1578
1579 FOR_EACH_FUNCTION (node)
1580 {
1581 tree decl = node->decl;
1582
1583 gcc_assert (!node->process || node->same_comdat_group);
1584 if (node->process)
1585 continue;
1586
1587 /* We need to output all local functions that are used and not
1588 always inlined, as well as those that are reachable from
1589 outside the current compilation unit. */
1590 if (node->analyzed
1591 && !node->thunk.thunk_p
1592 && !node->alias
1593 && !node->inlined_to
1594 && !TREE_ASM_WRITTEN (decl)
1595 && !DECL_EXTERNAL (decl))
1596 {
1597 node->process = 1;
1598 if (node->same_comdat_group)
1599 {
1600 cgraph_node *next;
1601 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1602 next != node;
1603 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1604 if (!next->thunk.thunk_p && !next->alias
1605 && !next->comdat_local_p ())
1606 next->process = 1;
1607 }
1608 }
1609 else if (node->same_comdat_group)
1610 {
1611 if (flag_checking)
1612 check_same_comdat_groups = true;
1613 }
1614 else
1615 {
1616 /* We should've reclaimed all functions that are not needed. */
1617 if (flag_checking
1618 && !node->inlined_to
1619 && gimple_has_body_p (decl)
1620 /* FIXME: in an ltrans unit when the offline copy is outside a partition but
1621 inline copies are inside a partition, we can end up not removing the body
1622 since we no longer have an analyzed node pointing to it. */
1623 && !node->in_other_partition
1624 && !node->alias
1625 && !node->clones
1626 && !DECL_EXTERNAL (decl))
1627 {
1628 node->debug ();
1629 internal_error ("failed to reclaim unneeded function");
1630 }
1631 gcc_assert (node->inlined_to
1632 || !gimple_has_body_p (decl)
1633 || node->in_other_partition
1634 || node->clones
1635 || DECL_ARTIFICIAL (decl)
1636 || DECL_EXTERNAL (decl));
1637
1638 }
1639
1640 }
1641 if (flag_checking && check_same_comdat_groups)
1642 FOR_EACH_FUNCTION (node)
1643 if (node->same_comdat_group && !node->process)
1644 {
1645 tree decl = node->decl;
1646 if (!node->inlined_to
1647 && gimple_has_body_p (decl)
1648 /* FIXME: in an ltrans unit when the offline copy is outside a
1649 partition but inline copies are inside a partition, we can
1650 end up not removing the body since we no longer have an
1651 analyzed node pointing to it. */
1652 && !node->in_other_partition
1653 && !node->clones
1654 && !DECL_EXTERNAL (decl))
1655 {
1656 node->debug ();
1657 internal_error ("failed to reclaim unneeded function in same "
1658 "comdat group");
1659 }
1660 }
1661 }
1662
1663 /* DECL is a FUNCTION_DECL. Initialize datastructures so that DECL is a
1664 function in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA form.
1665
1666 Set current_function_decl and cfun to the newly constructed empty function
1667 body. Return the basic block in the function body. */
1668
1669 basic_block
1670 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1671 {
1672 basic_block bb;
1673 edge e;
1674
1675 current_function_decl = decl;
1676 allocate_struct_function (decl, false);
1677 gimple_register_cfg_hooks ();
1678 init_empty_tree_cfg ();
1679 init_tree_ssa (cfun);
1680
1681 if (in_ssa)
1682 {
1683 init_ssa_operands (cfun);
1684 cfun->gimple_df->in_ssa_p = true;
1685 cfun->curr_properties |= PROP_ssa;
1686 }
1687
1688 DECL_INITIAL (decl) = make_node (BLOCK);
1689 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1690
1691 DECL_SAVED_TREE (decl) = error_mark_node;
1692 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1693 | PROP_cfg | PROP_loops);
1694
1695 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1696 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1697 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1698
1699 /* Create BB for body of the function and connect it properly. */
1700 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1701 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1702 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1703 bb->count = count;
1704 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1705 e->probability = profile_probability::always ();
1706 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1707 e->probability = profile_probability::always ();
1708 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1709
1710 return bb;
1711 }
1712
1713 /* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
1714 VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
1715 it is non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1716 zero for a result-adjusting thunk. */
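/* An illustrative C++ example (not part of this file) of why the adjustment
   is needed:

     struct A { virtual int f () { return 1; } int a; };
     struct B { virtual int g () { return 2; } int b; };
     struct C : A, B { int g () { return 3; } };

   A call to g through a B * that actually points into a C object dispatches
   to a thunk for C::g; the thunk applies the fixed offset of the B subobject
   within C to the incoming pointer and transfers to C::g.  Virtual bases
   additionally require VIRTUAL_OFFSET, which is looked up in the vtable as
   done below.  */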
1717
1718 tree
1719 thunk_adjust (gimple_stmt_iterator * bsi,
1720 tree ptr, bool this_adjusting,
1721 HOST_WIDE_INT fixed_offset, tree virtual_offset,
1722 HOST_WIDE_INT indirect_offset)
1723 {
1724 gassign *stmt;
1725 tree ret;
1726
1727 if (this_adjusting
1728 && fixed_offset != 0)
1729 {
1730 stmt = gimple_build_assign
1731 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1732 ptr,
1733 fixed_offset));
1734 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1735 }
1736
1737 if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
1738 {
1739 tree vfunc_type = make_node (FUNCTION_TYPE);
1740 TREE_TYPE (vfunc_type) = integer_type_node;
1741 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1742 layout_type (vfunc_type);
1743
1744 vtable_entry_type = build_pointer_type (vfunc_type);
1745 }
1746
1747 /* If there's a virtual offset, look up that value in the vtable and
1748 adjust the pointer again. */
1749 if (virtual_offset)
1750 {
1751 tree vtabletmp;
1752 tree vtabletmp2;
1753 tree vtabletmp3;
1754
1755 vtabletmp =
1756 create_tmp_reg (build_pointer_type
1757 (build_pointer_type (vtable_entry_type)), "vptr");
1758
1759 /* The vptr is always at offset zero in the object. */
1760 stmt = gimple_build_assign (vtabletmp,
1761 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1762 ptr));
1763 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1764
1765 /* Form the vtable address. */
1766 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1767 "vtableaddr");
1768 stmt = gimple_build_assign (vtabletmp2,
1769 build_simple_mem_ref (vtabletmp));
1770 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1771
1772 /* Find the entry with the vcall offset. */
1773 stmt = gimple_build_assign (vtabletmp2,
1774 fold_build_pointer_plus_loc (input_location,
1775 vtabletmp2,
1776 virtual_offset));
1777 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1778
1779 /* Get the offset itself. */
1780 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1781 "vcalloffset");
1782 stmt = gimple_build_assign (vtabletmp3,
1783 build_simple_mem_ref (vtabletmp2));
1784 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1785
1786 /* Adjust the `this' pointer. */
1787 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1788 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1789 GSI_CONTINUE_LINKING);
1790 }
1791
1792 /* Likewise for an offset that is stored in the object that contains the
1793 vtable. */
1794 if (indirect_offset != 0)
1795 {
1796 tree offset_ptr, offset_tree;
1797
1798 /* Get the address of the offset. */
1799 offset_ptr
1800 = create_tmp_reg (build_pointer_type
1801 (build_pointer_type (vtable_entry_type)),
1802 "offset_ptr");
1803 stmt = gimple_build_assign (offset_ptr,
1804 build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
1805 ptr));
1806 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1807
1808 stmt = gimple_build_assign
1809 (offset_ptr,
1810 fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
1811 indirect_offset));
1812 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1813
1814 /* Get the offset itself. */
1815 offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
1816 "offset");
1817 stmt = gimple_build_assign (offset_tree,
1818 build_simple_mem_ref (offset_ptr));
1819 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1820
1821 /* Adjust the `this' pointer. */
1822 ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
1823 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1824 GSI_CONTINUE_LINKING);
1825 }
1826
1827 if (!this_adjusting
1828 && fixed_offset != 0)
1829 /* Adjust the pointer by the constant. */
1830 {
1831 tree ptrtmp;
1832
1833 if (VAR_P (ptr))
1834 ptrtmp = ptr;
1835 else
1836 {
1837 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1838 stmt = gimple_build_assign (ptrtmp, ptr);
1839 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1840 }
1841 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1842 ptrtmp, fixed_offset);
1843 }
1844
1845 /* Emit the statement and gimplify the adjustment expression. */
1846 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1847 stmt = gimple_build_assign (ret, ptr);
1848 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1849
1850 return ret;
1851 }
1852
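/* As a rough illustration (byte offsets; the exact pointer types are elided),
for a this-adjusting thunk the statements built above compute

  p = this + FIXED_OFFSET;                                  constant adjustment
  p = p + *(ptrdiff_t *) (*(char **) p + VIRTUAL_OFFSET);   vcall offset, if any
  p = p + *(ptrdiff_t *) ((char *) p + INDIRECT_OFFSET);    indirect offset, if any

For a result-adjusting (covariant return) thunk the same adjustments are
applied to the returned pointer instead, with the constant FIXED_OFFSET
added last rather than first. */
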
1853 /* Expand the thunk to GIMPLE if possible.
1854 When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1855 no assembler is produced directly.
1856 When OUTPUT_ASM_THUNKS is true, also produce assembler for
1857 thunks that are not lowered. */
1858
1859 bool
1860 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1861 {
1862 bool this_adjusting = thunk.this_adjusting;
1863 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1864 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1865 HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1866 tree virtual_offset = NULL;
1867 tree alias = callees->callee->decl;
1868 tree thunk_fndecl = decl;
1869 tree a;
1870
1871 if (!force_gimple_thunk
1872 && this_adjusting
1873 && indirect_offset == 0
1874 && !DECL_EXTERNAL (alias)
1875 && !DECL_STATIC_CHAIN (alias)
1876 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1877 virtual_value, alias))
1878 {
1879 tree fn_block;
1880 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1881
1882 if (!output_asm_thunks)
1883 {
1884 analyzed = true;
1885 return false;
1886 }
1887
1888 if (in_lto_p)
1889 get_untransformed_body ();
1890 a = DECL_ARGUMENTS (thunk_fndecl);
1891
1892 current_function_decl = thunk_fndecl;
1893
1894 /* Ensure thunks are emitted in their correct sections. */
1895 resolve_unique_section (thunk_fndecl, 0,
1896 flag_function_sections);
1897
1898 DECL_RESULT (thunk_fndecl)
1899 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1900 RESULT_DECL, 0, restype);
1901 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1902
1903 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1904 create one. */
1905 fn_block = make_node (BLOCK);
1906 BLOCK_VARS (fn_block) = a;
1907 DECL_INITIAL (thunk_fndecl) = fn_block;
1908 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1909 allocate_struct_function (thunk_fndecl, false);
1910 init_function_start (thunk_fndecl);
1911 cfun->is_thunk = 1;
1912 insn_locations_init ();
1913 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1914 prologue_location = curr_insn_location ();
1915
1916 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1917 fixed_offset, virtual_value, alias);
1918
1919 insn_locations_finalize ();
1920 init_insn_lengths ();
1921 free_after_compilation (cfun);
1922 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1923 thunk.thunk_p = false;
1924 analyzed = false;
1925 }
1926 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1927 {
1928 error ("generic thunk code fails for method %qD which uses %<...%>",
1929 thunk_fndecl);
1930 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1931 analyzed = true;
1932 return false;
1933 }
1934 else
1935 {
1936 tree restype;
1937 basic_block bb, then_bb, else_bb, return_bb;
1938 gimple_stmt_iterator bsi;
1939 int nargs = 0;
1940 tree arg;
1941 int i;
1942 tree resdecl;
1943 tree restmp = NULL;
1944
1945 gcall *call;
1946 greturn *ret;
1947 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1948
1949 /* We may be called from create_wrapper, which releases the body except for
1950 DECL_ARGUMENTS. In that case force_gimple_thunk is true. */
1951 if (in_lto_p && !force_gimple_thunk)
1952 get_untransformed_body ();
1953
1954 /* We need to force DECL_IGNORED_P when the thunk is created
1955 after early debug was run. */
1956 if (force_gimple_thunk)
1957 DECL_IGNORED_P (thunk_fndecl) = 1;
1958
1959 a = DECL_ARGUMENTS (thunk_fndecl);
1960
1961 current_function_decl = thunk_fndecl;
1962
1963 /* Ensure thunks are emitted in their correct sections. */
1964 resolve_unique_section (thunk_fndecl, 0,
1965 flag_function_sections);
1966
1967 bitmap_obstack_initialize (NULL);
1968
1969 if (thunk.virtual_offset_p)
1970 virtual_offset = size_int (virtual_value);
1971
1972 /* Build the return declaration for the function. */
1973 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1974 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1975 {
1976 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1977 DECL_ARTIFICIAL (resdecl) = 1;
1978 DECL_IGNORED_P (resdecl) = 1;
1979 DECL_CONTEXT (resdecl) = thunk_fndecl;
1980 DECL_RESULT (thunk_fndecl) = resdecl;
1981 }
1982 else
1983 resdecl = DECL_RESULT (thunk_fndecl);
1984
1985 profile_count cfg_count = count;
1986 if (!cfg_count.initialized_p ())
1987 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
1988
1989 bb = then_bb = else_bb = return_bb
1990 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
1991
1992 bsi = gsi_start_bb (bb);
1993
1994 /* Build call to the function being thunked. */
1995 if (!VOID_TYPE_P (restype)
1996 && (!alias_is_noreturn
1997 || TREE_ADDRESSABLE (restype)
1998 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1999 {
2000 if (DECL_BY_REFERENCE (resdecl))
2001 {
2002 restmp = gimple_fold_indirect_ref (resdecl);
2003 if (!restmp)
2004 restmp = build2 (MEM_REF,
2005 TREE_TYPE (TREE_TYPE (resdecl)),
2006 resdecl,
2007 build_int_cst (TREE_TYPE (resdecl), 0));
2008 }
2009 else if (!is_gimple_reg_type (restype))
2010 {
2011 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
2012 {
2013 restmp = resdecl;
2014
2015 if (VAR_P (restmp))
2016 {
2017 add_local_decl (cfun, restmp);
2018 BLOCK_VARS (DECL_INITIAL (current_function_decl))
2019 = restmp;
2020 }
2021 }
2022 else
2023 restmp = create_tmp_var (restype, "retval");
2024 }
2025 else
2026 restmp = create_tmp_reg (restype, "retval");
2027 }
2028
2029 for (arg = a; arg; arg = DECL_CHAIN (arg))
2030 nargs++;
2031 auto_vec<tree> vargs (nargs);
2032 i = 0;
2033 arg = a;
2034 if (this_adjusting)
2035 {
2036 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
2037 virtual_offset, indirect_offset));
2038 arg = DECL_CHAIN (a);
2039 i = 1;
2040 }
2041
2042 if (nargs)
2043 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
2044 {
2045 tree tmp = arg;
2046 if (VECTOR_TYPE_P (TREE_TYPE (arg))
2047 || TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
2048 DECL_GIMPLE_REG_P (arg) = 1;
2049
2050 if (!is_gimple_val (arg))
2051 {
2052 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
2053 (TREE_TYPE (arg)), "arg");
2054 gimple *stmt = gimple_build_assign (tmp, arg);
2055 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2056 }
2057 vargs.quick_push (tmp);
2058 }
2059 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
2060 callees->call_stmt = call;
2061 gimple_call_set_from_thunk (call, true);
2062 if (DECL_STATIC_CHAIN (alias))
2063 {
2064 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
2065 tree type = TREE_TYPE (p);
2066 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
2067 PARM_DECL, create_tmp_var_name ("CHAIN"),
2068 type);
2069 DECL_ARTIFICIAL (decl) = 1;
2070 DECL_IGNORED_P (decl) = 1;
2071 TREE_USED (decl) = 1;
2072 DECL_CONTEXT (decl) = thunk_fndecl;
2073 DECL_ARG_TYPE (decl) = type;
2074 TREE_READONLY (decl) = 1;
2075
2076 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
2077 sf->static_chain_decl = decl;
2078
2079 gimple_call_set_chain (call, decl);
2080 }
2081
2082 /* Return slot optimization is always possible and in fact required to
2083 return values with DECL_BY_REFERENCE. */
2084 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2085 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2086 || DECL_BY_REFERENCE (resdecl)))
2087 gimple_call_set_return_slot_opt (call, true);
2088
2089 if (restmp)
2090 {
2091 gimple_call_set_lhs (call, restmp);
2092 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2093 TREE_TYPE (TREE_TYPE (alias))));
2094 }
2095 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2096 if (!alias_is_noreturn)
2097 {
2098 if (restmp && !this_adjusting
2099 && (fixed_offset || virtual_offset))
2100 {
2101 tree true_label = NULL_TREE;
2102
2103 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2104 {
2105 gimple *stmt;
2106 edge e;
2107 /* If the return type is a pointer, we need to
2108 protect against NULL. We know there will be an
2109 adjustment, because that's why we're emitting a
2110 thunk. */
2111 then_bb = create_basic_block (NULL, bb);
2112 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2113 return_bb = create_basic_block (NULL, then_bb);
2114 return_bb->count = cfg_count;
2115 else_bb = create_basic_block (NULL, else_bb);
2116 else_bb->count = cfg_count.apply_scale (1, 16);
2117 add_bb_to_loop (then_bb, bb->loop_father);
2118 add_bb_to_loop (return_bb, bb->loop_father);
2119 add_bb_to_loop (else_bb, bb->loop_father);
2120 remove_edge (single_succ_edge (bb));
2121 true_label = gimple_block_label (then_bb);
2122 stmt = gimple_build_cond (NE_EXPR, restmp,
2123 build_zero_cst (TREE_TYPE (restmp)),
2124 NULL_TREE, NULL_TREE);
2125 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2126 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2127 e->probability = profile_probability::guessed_always ()
2128 .apply_scale (1, 16);
2129 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2130 e->probability = profile_probability::guessed_always ()
2131 .apply_scale (1, 16);
2132 make_single_succ_edge (return_bb,
2133 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2134 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2135 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2136 e->probability = profile_probability::always ();
2137 bsi = gsi_last_bb (then_bb);
2138 }
2139
2140 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2141 fixed_offset, virtual_offset,
2142 indirect_offset);
2143 if (true_label)
2144 {
2145 gimple *stmt;
2146 bsi = gsi_last_bb (else_bb);
2147 stmt = gimple_build_assign (restmp,
2148 build_zero_cst (TREE_TYPE (restmp)));
2149 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2150 bsi = gsi_last_bb (return_bb);
2151 }
2152 }
2153 else
2154 gimple_call_set_tail (call, true);
2155
2156 /* Build return value. */
2157 if (!DECL_BY_REFERENCE (resdecl))
2158 ret = gimple_build_return (restmp);
2159 else
2160 ret = gimple_build_return (resdecl);
2161
2162 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2163 }
2164 else
2165 {
2166 gimple_call_set_tail (call, true);
2167 remove_edge (single_succ_edge (bb));
2168 }
2169
2170 cfun->gimple_df->in_ssa_p = true;
2171 update_max_bb_count ();
2172 profile_status_for_fn (cfun)
2173 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2174 ? PROFILE_READ : PROFILE_GUESSED;
2175 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2176 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2177 delete_unreachable_blocks ();
2178 update_ssa (TODO_update_ssa);
2179 checking_verify_flow_info ();
2180 free_dominance_info (CDI_DOMINATORS);
2181
2182 /* The thunk is now lowered to an ordinary GIMPLE body and is no longer
2183 treated as a thunk by the rest of the compiler. */
2184 thunk.thunk_p = false;
2185 lowered = true;
2186 bitmap_obstack_release (NULL);
2187 }
2188 current_function_decl = NULL;
2189 set_cfun (NULL);
2190 return true;
2191 }
2192
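/* For example (illustrative C++), a hierarchy such as

  struct A { virtual int f (); int a; };
  struct B { virtual int g (); int b; };
  struct C : A, B { int f (); int g (); };

typically requires a this-adjusting thunk for C::g when it is called through
a pointer to B. When the target cannot emit the thunk directly via
output_mi_thunk, the generic path above builds a small GIMPLE body of roughly
the form

  adjusted_this = this + FIXED_OFFSET;
  retval = C::g (adjusted_this);   <-- marked as a tail call when possible
  return retval;

with the NULL-pointer guard and the return-value adjustment added only for
covariant return thunks. */
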
2193 /* Assemble thunks and aliases associated with this node. */
2194
2195 void
2196 cgraph_node::assemble_thunks_and_aliases (void)
2197 {
2198 cgraph_edge *e;
2199 ipa_ref *ref;
2200
2201 for (e = callers; e;)
2202 if (e->caller->thunk.thunk_p
2203 && !e->caller->inlined_to)
2204 {
2205 cgraph_node *thunk = e->caller;
2206
2207 e = e->next_caller;
2208 thunk->expand_thunk (true, false);
2209 thunk->assemble_thunks_and_aliases ();
2210 }
2211 else
2212 e = e->next_caller;
2213
2214 FOR_EACH_ALIAS (this, ref)
2215 {
2216 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2217 if (!alias->transparent_alias)
2218 {
2219 bool saved_written = TREE_ASM_WRITTEN (decl);
2220
2221 /* Force assemble_alias to really output the alias this time instead of
2222 buffering it in the alias pairs list. */
2223 TREE_ASM_WRITTEN (decl) = 1;
2224 if (alias->symver)
2225 do_assemble_symver (alias->decl,
2226 DECL_ASSEMBLER_NAME (decl));
2227 else
2228 do_assemble_alias (alias->decl,
2229 DECL_ASSEMBLER_NAME (decl));
2230 alias->assemble_thunks_and_aliases ();
2231 TREE_ASM_WRITTEN (decl) = saved_written;
2232 }
2233 }
2234 }
2235
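/* The aliases handled here come, for instance, from declarations such as
(illustrative C using the GCC attribute syntax)

  int impl (int x) { return x + 1; }
  int same (int) __attribute__ ((alias ("impl")));

and, for versioned symbols (the symver flag tested above), from something
along the lines of

  __attribute__ ((symver ("foo@VERS_1")))
  int foo_v1 (void) { return 1; }

the latter being emitted through do_assemble_symver rather than
do_assemble_alias. */
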
2236 /* Expand the function specified by this node. */
2237
2238 void
2239 cgraph_node::expand (void)
2240 {
2241 location_t saved_loc;
2242
2243 /* We ought to not compile any inline clones. */
2244 gcc_assert (!inlined_to);
2245
2246 /* __RTL functions are compiled as soon as they are parsed, so don't
2247 do it again. */
2248 if (native_rtl_p ())
2249 return;
2250
2251 announce_function (decl);
2252 process = 0;
2253 gcc_assert (lowered);
2254 get_untransformed_body ();
2255
2256 /* Generate RTL for the body of DECL. */
2257
2258 timevar_push (TV_REST_OF_COMPILATION);
2259
2260 gcc_assert (symtab->global_info_ready);
2261
2262 /* Initialize the default bitmap obstack. */
2263 bitmap_obstack_initialize (NULL);
2264
2265 /* Initialize the RTL code for the function. */
2266 saved_loc = input_location;
2267 input_location = DECL_SOURCE_LOCATION (decl);
2268
2269 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2270 push_cfun (DECL_STRUCT_FUNCTION (decl));
2271 init_function_start (decl);
2272
2273 gimple_register_cfg_hooks ();
2274
2275 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2276
2277 execute_all_ipa_transforms (false);
2278
2279 /* Perform all tree transforms and optimizations. */
2280
2281 /* Signal the start of passes. */
2282 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2283
2284 execute_pass_list (cfun, g->get_passes ()->all_passes);
2285
2286 /* Signal the end of passes. */
2287 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2288
2289 bitmap_obstack_release (&reg_obstack);
2290
2291 /* Release the default bitmap obstack. */
2292 bitmap_obstack_release (NULL);
2293
2294 /* If requested, warn about function definitions where the function will
2295 return a value (usually of some struct or union type) which itself will
2296 take up a lot of stack space. */
2297 if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2298 {
2299 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2300
2301 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2302 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2303 && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2304 warn_larger_than_size) > 0)
2305 {
2306 unsigned int size_as_int
2307 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2308
2309 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2310 warning (OPT_Wlarger_than_,
2311 "size of return value of %q+D is %u bytes",
2312 decl, size_as_int);
2313 else
2314 warning (OPT_Wlarger_than_,
2315 "size of return value of %q+D is larger than %wu bytes",
2316 decl, warn_larger_than_size);
2317 }
2318 }
2319
2320 gimple_set_body (decl, NULL);
2321 if (DECL_STRUCT_FUNCTION (decl) == 0
2322 && !cgraph_node::get (decl)->origin)
2323 {
2324 /* Stop pointing to the local nodes about to be freed.
2325 But DECL_INITIAL must remain nonzero so we know this
2326 was an actual function definition.
2327 For a nested function, this is done in c_pop_function_context.
2328 If rest_of_compilation set this to 0, leave it 0. */
2329 if (DECL_INITIAL (decl) != 0)
2330 DECL_INITIAL (decl) = error_mark_node;
2331 }
2332
2333 input_location = saved_loc;
2334
2335 ggc_collect ();
2336 timevar_pop (TV_REST_OF_COMPILATION);
2337
2338 /* Make sure that the back end didn't give up on compiling. */
2339 gcc_assert (TREE_ASM_WRITTEN (decl));
2340 if (cfun)
2341 pop_cfun ();
2342
2343 /* It would make a lot more sense to output thunks before the function body
2344 to get more forward and fewer backward jumps. This however would require
2345 solving a problem with comdats. See PR48668. Also aliases must come after
2346 the function itself to keep one-pass assemblers, like the one on AIX,
2347 happy. See PR 50689.
2348 FIXME: Perhaps thunks should be moved before the function IFF they are not
2349 in comdat groups. */
2350 assemble_thunks_and_aliases ();
2351 release_body ();
2352 /* Eliminate all call edges. This is important so the GIMPLE_CALL statements
2353 no longer point to the dead function body. */
2354 remove_callees ();
2355 remove_all_references ();
2356 }
2357
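/* The return-value size warning above would trigger, for example, on code
such as (illustrative, assuming -Wlarger-than=1024 or a similar limit)

  struct big { char buf[4096]; };
  struct big make_big (void) { struct big b = { { 0 } }; return b; }

which is diagnosed roughly as "size of return value of 'make_big' is 4096
bytes". */
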
2358 /* Node comparator that orders functions by the time they were first
2359 executed, according to the time profile (tp_first_run). */
2360
2361 static int
2362 node_cmp (const void *pa, const void *pb)
2363 {
2364 const cgraph_node *a = *(const cgraph_node * const *) pa;
2365 const cgraph_node *b = *(const cgraph_node * const *) pb;
2366
2367 /* Functions with a time profile must come before those without one. */
2368 if (!a->tp_first_run || !b->tp_first_run)
2369 return a->tp_first_run - b->tp_first_run;
2370
2371 return a->tp_first_run != b->tp_first_run
2372 ? b->tp_first_run - a->tp_first_run
2373 : b->order - a->order;
2374 }
2375
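/* A worked example with hypothetical values: for tp_first_run equal to
{ f1:3, f2:0, f3:1 } the qsort in expand_all_functions below places f2 first
(nodes without a profile sort ahead here) and then f1, f3 in decreasing
tp_first_run order. Since that function walks the sorted array from the end
towards the start, the actual expansion order becomes f3, f1, f2: profiled
functions in increasing time of first run, unprofiled functions last. */
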
2376 /* Expand all functions that must be output.
2377
2378 Attempt to topologically sort the nodes so that each function is output when
2379 all called functions are already assembled to allow data to be
2380 propagated across the callgraph. Use a stack to get smaller distance
2381 between a function and its callees (later we may choose to use a more
2382 sophisticated algorithm for function reordering; we will likely want
2383 to use subsections to make the output functions appear in top-down
2384 order). */
2385
2386 static void
2387 expand_all_functions (void)
2388 {
2389 cgraph_node *node;
2390 cgraph_node **order = XCNEWVEC (cgraph_node *,
2391 symtab->cgraph_count);
2392 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2393 int order_pos, new_order_pos = 0;
2394 int i;
2395
2396 order_pos = ipa_reverse_postorder (order);
2397 gcc_assert (order_pos == symtab->cgraph_count);
2398
2399 /* The garbage collector may remove inline clones that we eliminate during
2400 optimization, so we must be sure not to reference them. */
2401 for (i = 0; i < order_pos; i++)
2402 if (order[i]->process)
2403 order[new_order_pos++] = order[i];
2404
2405 if (flag_profile_reorder_functions)
2406 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2407
2408 for (i = new_order_pos - 1; i >= 0; i--)
2409 {
2410 node = order[i];
2411
2412 if (node->process)
2413 {
2414 expanded_func_count++;
2415 if (node->tp_first_run)
2416 profiled_func_count++;
2417
2418 if (symtab->dump_file)
2419 fprintf (symtab->dump_file,
2420 "Time profile order in expand_all_functions:%s:%d\n",
2421 node->asm_name (), node->tp_first_run);
2422 node->process = 0;
2423 node->expand ();
2424 }
2425 }
2426
2427 if (dump_file)
2428 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2429 main_input_filename, profiled_func_count, expanded_func_count);
2430
2431 if (symtab->dump_file && flag_profile_reorder_functions)
2432 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2433 profiled_func_count, expanded_func_count);
2434
2435 symtab->process_new_functions ();
2436 free_gimplify_stack ();
2437
2438 free (order);
2439 }
2440
2441 /* The kind of a symbol table entry, used when outputting nodes in the order given by their cgraph order numbers. */
2442
2443 enum cgraph_order_sort_kind
2444 {
2445 ORDER_UNDEFINED = 0,
2446 ORDER_FUNCTION,
2447 ORDER_VAR,
2448 ORDER_VAR_UNDEF,
2449 ORDER_ASM
2450 };
2451
2452 struct cgraph_order_sort
2453 {
2454 enum cgraph_order_sort_kind kind;
2455 union
2456 {
2457 cgraph_node *f;
2458 varpool_node *v;
2459 asm_node *a;
2460 } u;
2461 };
2462
2463 /* Output all functions, variables, and asm statements in the order
2464 according to their order fields, which is the order in which they
2465 appeared in the file. This implements -fno-toplevel-reorder. In
2466 this mode we may output functions and variables which don't really
2467 need to be output. */
2468
2469 static void
2470 output_in_order (void)
2471 {
2472 int max;
2473 cgraph_order_sort *nodes;
2474 int i;
2475 cgraph_node *pf;
2476 varpool_node *pv;
2477 asm_node *pa;
2478 max = symtab->order;
2479 nodes = XCNEWVEC (cgraph_order_sort, max);
2480
2481 FOR_EACH_DEFINED_FUNCTION (pf)
2482 {
2483 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2484 {
2485 if (!pf->no_reorder)
2486 continue;
2487 i = pf->order;
2488 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2489 nodes[i].kind = ORDER_FUNCTION;
2490 nodes[i].u.f = pf;
2491 }
2492 }
2493
2494 /* There is a similar loop in symbol_table::output_variables.
2495 Please keep them in sync. */
2496 FOR_EACH_VARIABLE (pv)
2497 {
2498 if (!pv->no_reorder)
2499 continue;
2500 if (DECL_HARD_REGISTER (pv->decl)
2501 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2502 continue;
2503 i = pv->order;
2504 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2505 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2506 nodes[i].u.v = pv;
2507 }
2508
2509 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2510 {
2511 i = pa->order;
2512 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2513 nodes[i].kind = ORDER_ASM;
2514 nodes[i].u.a = pa;
2515 }
2516
2517 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
2518
2519 for (i = 0; i < max; ++i)
2520 if (nodes[i].kind == ORDER_VAR)
2521 nodes[i].u.v->finalize_named_section_flags ();
2522
2523 for (i = 0; i < max; ++i)
2524 {
2525 switch (nodes[i].kind)
2526 {
2527 case ORDER_FUNCTION:
2528 nodes[i].u.f->process = 0;
2529 nodes[i].u.f->expand ();
2530 break;
2531
2532 case ORDER_VAR:
2533 nodes[i].u.v->assemble_decl ();
2534 break;
2535
2536 case ORDER_VAR_UNDEF:
2537 assemble_undefined_decl (nodes[i].u.v->decl);
2538 break;
2539
2540 case ORDER_ASM:
2541 assemble_asm (nodes[i].u.a->asm_str);
2542 break;
2543
2544 case ORDER_UNDEFINED:
2545 break;
2546
2547 default:
2548 gcc_unreachable ();
2549 }
2550 }
2551
2552 symtab->clear_asm_symbols ();
2553
2554 free (nodes);
2555 }
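
/* For example (illustrative), with -fno-toplevel-reorder a unit such as

  int counter = 0;
  asm (".pushsection .note.example\n\t.popsection");
  int bump (void) { return ++counter; }

is emitted with the variable, the toplevel asm and the function in exactly
this order, driven by the order fields recorded when each entry was
finalized. */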
2556
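/* Run the interprocedural passes: the small IPA passes, the analysis
(summary) stage of the regular IPA passes, streaming of LTO and offload
summaries when requested, and finally the regular IPA passes themselves
unless their execution is deferred to link time. */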
2557 static void
2558 ipa_passes (void)
2559 {
2560 gcc::pass_manager *passes = g->get_passes ();
2561
2562 set_cfun (NULL);
2563 current_function_decl = NULL;
2564 gimple_register_cfg_hooks ();
2565 bitmap_obstack_initialize (NULL);
2566
2567 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2568
2569 if (!in_lto_p)
2570 {
2571 execute_ipa_pass_list (passes->all_small_ipa_passes);
2572 if (seen_error ())
2573 return;
2574 }
2575
2576 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2577 devirtualization and other changes where the removal needs to iterate. */
2578 symtab->remove_unreachable_nodes (symtab->dump_file);
2579
2580 /* If pass_all_early_optimizations was not scheduled, the state of
2581 the cgraph will not be properly updated. Update it now. */
2582 if (symtab->state < IPA_SSA)
2583 symtab->state = IPA_SSA;
2584
2585 if (!in_lto_p)
2586 {
2587 /* Generate coverage variables and constructors. */
2588 coverage_finish ();
2589
2590 /* Process new functions added. */
2591 set_cfun (NULL);
2592 current_function_decl = NULL;
2593 symtab->process_new_functions ();
2594
2595 execute_ipa_summary_passes
2596 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2597 }
2598
2599 /* Some targets need to handle LTO assembler output specially. */
2600 if (flag_generate_lto || flag_generate_offload)
2601 targetm.asm_out.lto_start ();
2602
2603 if (!in_lto_p
2604 || flag_incremental_link == INCREMENTAL_LINK_LTO)
2605 {
2606 if (!quiet_flag)
2607 fprintf (stderr, "Streaming LTO\n");
2608 if (g->have_offload)
2609 {
2610 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2611 lto_stream_offload_p = true;
2612 ipa_write_summaries ();
2613 lto_stream_offload_p = false;
2614 }
2615 if (flag_lto)
2616 {
2617 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2618 lto_stream_offload_p = false;
2619 ipa_write_summaries ();
2620 }
2621 }
2622
2623 if (flag_generate_lto || flag_generate_offload)
2624 targetm.asm_out.lto_end ();
2625
2626 if (!flag_ltrans
2627 && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
2628 || !flag_lto || flag_fat_lto_objects))
2629 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2630 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2631
2632 bitmap_obstack_release (NULL);
2633 }
2634
2635
2636 /* Return the name of the symbol DECL is an alias of, as an identifier. */
2637
2638 static tree
2639 get_alias_symbol (tree decl)
2640 {
2641 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2642 return get_identifier (TREE_STRING_POINTER
2643 (TREE_VALUE (TREE_VALUE (alias))));
2644 }
2645
2646
2647 /* Weakrefs may be associated with external decls and thus not output
2648 at expansion time. Emit all necessary aliases. */
2649
2650 void
2651 symbol_table::output_weakrefs (void)
2652 {
2653 symtab_node *node;
2654 FOR_EACH_SYMBOL (node)
2655 if (node->alias
2656 && !TREE_ASM_WRITTEN (node->decl)
2657 && node->weakref)
2658 {
2659 tree target;
2660
2661 /* Weakrefs are special in that they do not require the target to be defined
2662 in the current compilation unit, so it is a bit hard to work out what we
2663 want to alias.
2664 When the alias target is defined, we need to fetch it from the symtab
2665 reference; otherwise it is pointed to by alias_target. */
2666 if (node->alias_target)
2667 target = (DECL_P (node->alias_target)
2668 ? DECL_ASSEMBLER_NAME (node->alias_target)
2669 : node->alias_target);
2670 else if (node->analyzed)
2671 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2672 else
2673 {
2674 gcc_unreachable ();
2675 target = get_alias_symbol (node->decl);
2676 }
2677 do_assemble_alias (node->decl, target);
2678 }
2679 }
2680
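/* The weakrefs handled here come from declarations such as (illustrative)

  static int local_name (void) __attribute__ ((weakref ("real_name")));

where "real_name" may be defined in another translation unit entirely; if
the alias has not been written out by expansion time, it is emitted here. */
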
2681 /* Perform interprocedural optimizations based on the callgraph and output the compilation unit. */
2682
2683 void
2684 symbol_table::compile (void)
2685 {
2686 if (seen_error ())
2687 return;
2688
2689 symtab_node::checking_verify_symtab_nodes ();
2690
2691 timevar_push (TV_CGRAPHOPT);
2692 if (pre_ipa_mem_report)
2693 dump_memory_report ("Memory consumption before IPA");
2694 if (!quiet_flag)
2695 fprintf (stderr, "Performing interprocedural optimizations\n");
2696 state = IPA;
2697
2698 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2699 if (flag_generate_lto || flag_generate_offload)
2700 lto_streamer_hooks_init ();
2701
2702 /* Don't run the IPA passes if there were any errors or sorry messages. */
2703 if (!seen_error ())
2704 {
2705 timevar_start (TV_CGRAPH_IPA_PASSES);
2706 ipa_passes ();
2707 timevar_stop (TV_CGRAPH_IPA_PASSES);
2708 }
2709 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2710 if (seen_error ()
2711 || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
2712 && flag_lto && !flag_fat_lto_objects))
2713 {
2714 timevar_pop (TV_CGRAPHOPT);
2715 return;
2716 }
2717
2718 global_info_ready = true;
2719 if (dump_file)
2720 {
2721 fprintf (dump_file, "Optimized ");
2722 symtab->dump (dump_file);
2723 }
2724 if (post_ipa_mem_report)
2725 dump_memory_report ("Memory consumption after IPA");
2726 timevar_pop (TV_CGRAPHOPT);
2727
2728 /* Output everything. */
2729 switch_to_section (text_section);
2730 (*debug_hooks->assembly_start) ();
2731 if (!quiet_flag)
2732 fprintf (stderr, "Assembling functions:\n");
2733 symtab_node::checking_verify_symtab_nodes ();
2734
2735 bitmap_obstack_initialize (NULL);
2736 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2737 bitmap_obstack_release (NULL);
2738 mark_functions_to_output ();
2739
2740 /* When weakref support is missing, we automatically translate all
2741 references to NODE to references to its ultimate alias target.
2742 The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2743 TREE_CHAIN.
2744
2745 Set up this mapping before we output any assembler but once we are sure
2746 that all symbol renaming is done.
2747
2748 FIXME: All this ugliness can go away if we just do renaming at gimple
2749 level by physically rewriting the IL. At the moment we can only redirect
2750 calls, so we need infrastructure for renaming references as well. */
2751 #ifndef ASM_OUTPUT_WEAKREF
2752 symtab_node *node;
2753
2754 FOR_EACH_SYMBOL (node)
2755 if (node->alias
2756 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2757 {
2758 IDENTIFIER_TRANSPARENT_ALIAS
2759 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2760 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2761 = (node->alias_target ? node->alias_target
2762 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2763 }
2764 #endif
2765
2766 state = EXPANSION;
2767
2768 /* First output asm statements and anything ordered. The process
2769 flag is cleared for these nodes, so we skip them later. */
2770 output_in_order ();
2771
2772 timevar_start (TV_CGRAPH_FUNC_EXPANSION);
2773 expand_all_functions ();
2774 timevar_stop (TV_CGRAPH_FUNC_EXPANSION);
2775
2776 output_variables ();
2777
2778 process_new_functions ();
2779 state = FINISHED;
2780 output_weakrefs ();
2781
2782 if (dump_file)
2783 {
2784 fprintf (dump_file, "\nFinal ");
2785 symtab->dump (dump_file);
2786 }
2787 if (!flag_checking)
2788 return;
2789 symtab_node::verify_symtab_nodes ();
2790 /* Double check that all inline clones are gone and that all
2791 function bodies have been released from memory. */
2792 if (!seen_error ())
2793 {
2794 cgraph_node *node;
2795 bool error_found = false;
2796
2797 FOR_EACH_DEFINED_FUNCTION (node)
2798 if (node->inlined_to
2799 || gimple_has_body_p (node->decl))
2800 {
2801 error_found = true;
2802 node->debug ();
2803 }
2804 if (error_found)
2805 internal_error ("nodes with unreleased memory found");
2806 }
2807 }
2808
2809 /* Earlydebug dump file, flags, and number. */
2810
2811 static int debuginfo_early_dump_nr;
2812 static FILE *debuginfo_early_dump_file;
2813 static dump_flags_t debuginfo_early_dump_flags;
2814
2815 /* Debug dump file, flags, and number. */
2816
2817 static int debuginfo_dump_nr;
2818 static FILE *debuginfo_dump_file;
2819 static dump_flags_t debuginfo_dump_flags;
2820
2821 /* Register the debug and earlydebug dump files. */
2822
2823 void
2824 debuginfo_early_init (void)
2825 {
2826 gcc::dump_manager *dumps = g->get_dumps ();
2827 debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2828 "earlydebug", DK_tree,
2829 OPTGROUP_NONE,
2830 false);
2831 debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2832 "debug", DK_tree,
2833 OPTGROUP_NONE,
2834 false);
2835 }
2836
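/* These dumps are expected to correspond to the -fdump-earlydebug and
-fdump-debug command-line options (assuming the usual mapping from the
registered switch names), for example

  gcc -g -O2 -fdump-earlydebug -fdump-debug foo.c

which writes the corresponding .earlydebug and .debug dump files alongside
the other dump files. */
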
2837 /* Initialize the debug and earlydebug dump files. */
2838
2839 void
2840 debuginfo_init (void)
2841 {
2842 gcc::dump_manager *dumps = g->get_dumps ();
2843 debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2844 debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2845 debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2846 debuginfo_early_dump_flags
2847 = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2848 }
2849
2850 /* Finalize the debug and earlydebug dump files. */
2851
2852 void
2853 debuginfo_fini (void)
2854 {
2855 if (debuginfo_dump_file)
2856 dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2857 if (debuginfo_early_dump_file)
2858 dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2859 }
2860
2861 /* Set dump_file to the debug dump file. */
2862
2863 void
2864 debuginfo_start (void)
2865 {
2866 set_dump_file (debuginfo_dump_file);
2867 }
2868
2869 /* Undo setting dump_file to the debug dump file. */
2870
2871 void
2872 debuginfo_stop (void)
2873 {
2874 set_dump_file (NULL);
2875 }
2876
2877 /* Set dump_file to the earlydebug dump file. */
2878
2879 void
2880 debuginfo_early_start (void)
2881 {
2882 set_dump_file (debuginfo_early_dump_file);
2883 }
2884
2885 /* Undo setting dump_file to the earlydebug dump file. */
2886
2887 void
2888 debuginfo_early_stop (void)
2889 {
2890 set_dump_file (NULL);
2891 }
2892
2893 /* Analyze the whole compilation unit once it is parsed completely. */
2894
2895 void
2896 symbol_table::finalize_compilation_unit (void)
2897 {
2898 timevar_push (TV_CGRAPH);
2899
2900 /* If we're here there's no current function anymore. Some frontends
2901 are lazy in clearing these. */
2902 current_function_decl = NULL;
2903 set_cfun (NULL);
2904
2905 /* Do not skip analyzing the functions if there were errors; we would
2906 otherwise miss diagnostics for the following functions. */
2907
2908 /* Emit size functions we didn't inline. */
2909 finalize_size_functions ();
2910
2911 /* Mark alias targets necessary and emit diagnostics. */
2912 handle_alias_pairs ();
2913
2914 if (!quiet_flag)
2915 {
2916 fprintf (stderr, "\nAnalyzing compilation unit\n");
2917 fflush (stderr);
2918 }
2919
2920 if (flag_dump_passes)
2921 dump_passes ();
2922
2923 /* Gimplify and lower all functions, compute reachability and
2924 remove unreachable nodes. */
2925 analyze_functions (/*first_time=*/true);
2926
2927 /* Mark alias targets necessary and emit diagnostics. */
2928 handle_alias_pairs ();
2929
2930 /* Gimplify and lower thunks. */
2931 analyze_functions (/*first_time=*/false);
2932
2933 /* Offloading requires LTO infrastructure. */
2934 if (!in_lto_p && g->have_offload)
2935 flag_generate_offload = 1;
2936
2937 if (!seen_error ())
2938 {
2939 /* Emit early debug for reachable functions, and as a consequence,
2940 locally scoped symbols. */
2941 struct cgraph_node *cnode;
2942 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2943 (*debug_hooks->early_global_decl) (cnode->decl);
2944
2945 /* Clean up anything that needs cleaning up after initial debug
2946 generation. */
2947 debuginfo_early_start ();
2948 (*debug_hooks->early_finish) (main_input_filename);
2949 debuginfo_early_stop ();
2950 }
2951
2952 /* Finally drive the pass manager. */
2953 compile ();
2954
2955 timevar_pop (TV_CGRAPH);
2956 }
2957
2958 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2959 within the same process. For use by toplev::finalize. */
2960
2961 void
2962 cgraphunit_c_finalize (void)
2963 {
2964 gcc_assert (cgraph_new_nodes.length () == 0);
2965 cgraph_new_nodes.truncate (0);
2966
2967 vtable_entry_type = NULL;
2968 queued_nodes = &symtab_terminator;
2969
2970 first_analyzed = NULL;
2971 first_analyzed_var = NULL;
2972 }
2973
2974 /* Create a wrapper from this cgraph_node to the TARGET node. A thunk is
2975 used to implement this kind of wrapper method. */
2976
2977 void
2978 cgraph_node::create_wrapper (cgraph_node *target)
2979 {
2980 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2981 tree decl_result = DECL_RESULT (decl);
2982
2983 /* Remove the function's body but keep its arguments to be reused
2984 for the thunk. */
2985 release_body (true);
2986 reset ();
2987
2988 DECL_UNINLINABLE (decl) = false;
2989 DECL_RESULT (decl) = decl_result;
2990 DECL_INITIAL (decl) = NULL;
2991 allocate_struct_function (decl, false);
2992 set_cfun (NULL);
2993
2994 /* Turn the alias into a thunk and expand it into GIMPLE representation. */
2995 definition = true;
2996
2997 memset (&thunk, 0, sizeof (cgraph_thunk_info));
2998 thunk.thunk_p = true;
2999 create_edge (target, NULL, count);
3000 callees->can_throw_external = !TREE_NOTHROW (target->decl);
3001
3002 tree arguments = DECL_ARGUMENTS (decl);
3003
3004 while (arguments)
3005 {
3006 TREE_ADDRESSABLE (arguments) = false;
3007 arguments = TREE_CHAIN (arguments);
3008 }
3009
3010 expand_thunk (false, true);
3011
3012 /* Inline summary set-up. */
3013 analyze ();
3014 inline_analyze_function (this);
3015 }
3016
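/* As one illustration of where this is used: identical code folding
(-fipa-icf) may keep only one copy of two functions with the same body,

  int f (int x) { return x * 2; }
  int g (int x) { return x * 2; }

and, when it cannot simply create an alias, turn g into such a wrapper
thunk that merely calls f. */
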
3017 #include "gt-cgraphunit.h"