]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
IPA symver: allow multiple symvers for a definition
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31    and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66    indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115    separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138    At this stage functions that need to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141    Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151    The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208 #include "ipa-inline.h"
209 #include "omp-offload.h"
210
211 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
212 secondary queue used during optimization to accommodate passes that
213 may generate new functions that need to be optimized and expanded. */
214 vec<cgraph_node *> cgraph_new_nodes;
215
216 static void expand_all_functions (void);
217 static void mark_functions_to_output (void);
218 static void handle_alias_pairs (void);
219
220 /* Used for vtable lookup in thunk adjusting. */
221 static GTY (()) tree vtable_entry_type;
222
223 /* Return true if this symbol is a function from the C frontend specified
224 directly in RTL form (with "__RTL"). */
225
226 bool
227 symtab_node::native_rtl_p () const
228 {
229 if (TREE_CODE (decl) != FUNCTION_DECL)
230 return false;
231 if (!DECL_STRUCT_FUNCTION (decl))
232 return false;
233 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
234 }
235
/* Determine if symbol declaration is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  */
bool
symtab_node::needed_p (void)
{
  /* Double check that no one output the function into assembly file
     early.  __RTL functions are exempt: they are emitted as soon as they
     are parsed, so their assembler name is legitimately referenced.  */
  if (!native_rtl_p ())
    gcc_checking_assert
	(!DECL_ASSEMBLER_NAME_SET_P (decl)
	 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));

  /* Mere declarations (no definition) are never needed here.  */
  if (!definition)
    return false;

  /* Bodies defined elsewhere are not ours to output.  */
  if (DECL_EXTERNAL (decl))
    return false;

  /* If the user told us it is used, then it must be so.  */
  if (force_output)
    return true;

  /* ABI forced symbols are needed when they are external.  */
  if (forced_by_abi && TREE_PUBLIC (decl))
    return true;

  /* Keep static constructors and destructors: they must be output even
     when nothing references them directly.  (Virtual functions are kept
     via their vtables, not here.)  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
    return true;

  /* Externally visible variables must be output.  The exception is
     COMDAT variables that must be output only when they are needed.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  return false;
}
275
276 /* Head and terminator of the queue of nodes to be processed while building
277 callgraph. */
278
279 static symtab_node symtab_terminator (SYMTAB_SYMBOL);
280 static symtab_node *queued_nodes = &symtab_terminator;
281
282 /* Add NODE to queue starting at QUEUED_NODES.
283 The queue is linked via AUX pointers and terminated by pointer to 1. */
284
285 static void
286 enqueue_node (symtab_node *node)
287 {
288 if (node->aux)
289 return;
290 gcc_checking_assert (queued_nodes);
291 node->aux = queued_nodes;
292 queued_nodes = node;
293 }
294
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   How much work each node needs depends on how far compilation had
   progressed (symbol_table::state) when it was created.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  Hence iteration by index against a
     re-read length () rather than over a snapshot.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA if the rest of the unit already is.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    {
	      bool summaried_computed = ipa_fn_summaries != NULL;
	      g->get_passes ()->execute_early_local_passes ();
	      /* Early passes compute inline parameters to do inlining
		 and splitting.  This is redundant for functions added late.
		 Just throw away whatever it did.  */
	      if (!summaried_computed)
		{
		  ipa_free_fn_summary ();
		  ipa_free_size_summary ();
		}
	    }
	  else if (ipa_fn_summaries != NULL)
	    compute_fn_summary (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* Every queued node has been handled; empty the queue.  */
  cgraph_new_nodes.release ();
}
374
375 /* As an GCC extension we allow redefinition of the function. The
376 semantics when both copies of bodies differ is not well defined.
377 We replace the old body with new body so in unit at a time mode
378 we always use new body, while in normal mode we may end up with
379 old body inlined into some functions and new body expanded and
380 inlined in others.
381
382 ??? It may make more sense to use one body for inlining and other
383 body for expanding the function but this is difficult to do. */
384
385 void
386 cgraph_node::reset (void)
387 {
388 /* If process is set, then we have already begun whole-unit analysis.
389 This is *not* testing for whether we've already emitted the function.
390 That case can be sort-of legitimately seen with real function redefinition
391 errors. I would argue that the front end should never present us with
392 such a case, but don't enforce that for now. */
393 gcc_assert (!process);
394
395 /* Reset our data structures so we can analyze the function again. */
396 inlined_to = NULL;
397 memset (&rtl, 0, sizeof (rtl));
398 analyzed = false;
399 definition = false;
400 alias = false;
401 transparent_alias = false;
402 weakref = false;
403 cpp_implicit_alias = false;
404
405 remove_callees ();
406 remove_all_references ();
407 }
408
409 /* Return true when there are references to the node. INCLUDE_SELF is
410 true if a self reference counts as a reference. */
411
412 bool
413 symtab_node::referred_to_p (bool include_self)
414 {
415 ipa_ref *ref = NULL;
416
417 /* See if there are any references at all. */
418 if (iterate_referring (0, ref))
419 return true;
420 /* For functions check also calls. */
421 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
422 if (cn && cn->callers)
423 {
424 if (include_self)
425 return true;
426 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
427 if (e->caller != this)
428 return true;
429 }
430 return false;
431 }
432
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* GCC extension: a function may be redefined; drop the old body.  */
      node->reset ();
      node->redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  /* A CFG already present means the front end handed us a lowered body.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (!flag_toplevel_reorder)
    node->no_reorder = true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* __RTL functions were already output as soon as they were parsed (due
     to the large amount of global state in the backend).
     Mark such functions as "force_output" to reflect the fact that they
     will be in the asm file when considering the symbols they reference.
     The attempt to output them later on will bail out immediately.  */
  if (node->native_rtl_p ())
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
	|| node->no_reorder)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  if (!no_collect)
    ggc_collect ();

  /* During callgraph construction, obviously needed or referenced symbols
     enter the work queue immediately; later states handle this elsewhere.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
502
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;

  if (dump_file)
    {
      /* Describe for the dump how far along the new body already is.  */
      struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
      const char *function_type = ((gimple_has_body_p (fndecl))
				   ? (lowered
				      ? (gimple_in_ssa_p (fn)
					 ? "ssa gimple"
					 : "low gimple")
				      : "high gimple")
				   : "to-be-gimplified");
      fprintf (dump_file,
	       "Added new %s function %s to callgraph\n",
	       function_type,
	       fndecl_name (fndecl));
    }

  /* The amount of catch-up work depends on the current pipeline state.  */
  switch (symtab->state)
    {
    case PARSING:
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local = false;
      node->definition = true;
      /* The function is assumed reachable, so it must not be removed.  */
      node->force_output = true;
      if (TREE_PUBLIC (fndecl))
	node->externally_visible = true;
      if (!lowered && symtab->state == EXPANSION)
	{
	  /* During expansion there is no later opportunity to lower, so
	     run the lowering and early local passes right now.  */
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
607
/* Analyze the function scheduled to be output: create the callgraph edge
   for a thunk, resolve an alias, build a dispatcher body, or gimplify and
   lower an ordinary body, then mark the node analyzed.  */
void
cgraph_node::analyze (void)
{
  /* __RTL functions were fully handled at parse time; nothing to do.  */
  if (native_rtl_p ())
    {
      analyzed = true;
      return;
    }

  tree decl = this->decl;
  /* Point diagnostics at the function being analyzed.  */
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      /* A thunk gets a call edge to its target and is expanded here.  */
      cgraph_node *t = cgraph_node::get (thunk.alias);

      create_edge (t, NULL, t->count);
      callees->can_throw_external = !TREE_NOTHROW (t->decl);
      /* Target code in expand_thunk may need the thunk's target
	 to be analyzed, so recurse here.  */
      if (!t->analyzed && t->definition)
	t->analyze ();
      if (t->alias)
	{
	  t = t->get_alias_target ();
	  if (!t->analyzed && t->definition)
	    t->analyze ();
	}
      bool ret = expand_thunk (false, false);
      thunk.alias = NULL;
      if (!ret)
	return;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target), transparent_alias);
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* An ordinary function body: gimplify and lower it.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_needed (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  /* Nested functions must be materialized before lowering.  */
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
696
697 /* C++ frontend produce same body aliases all over the place, even before PCH
698 gets streamed out. It relies on us linking the aliases with their function
699 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
700 first produce aliases without links, but once C++ FE is sure he won't stream
701 PCH we build the links via this function. */
702
703 void
704 symbol_table::process_same_body_aliases (void)
705 {
706 symtab_node *node;
707 FOR_EACH_SYMBOL (node)
708 if (node->cpp_implicit_alias && !node->analyzed)
709 node->resolve_alias
710 (VAR_P (node->alias_target)
711 ? (symtab_node *)varpool_node::get_create (node->alias_target)
712 : (symtab_node *)cgraph_node::get_create (node->alias_target));
713 cpp_implicit_aliases_done = true;
714 }
715
/* Process the "symver" attribute(s) on symbol N, creating one additional
   alias symtab entry (carrying the versioned assembler name) per
   attribute value.  On any invalid use an error is emitted and the
   remaining attributes on N are not processed.  */

static void
process_symver_attribute (symtab_node *n)
{
  tree value = lookup_attribute ("symver", DECL_ATTRIBUTES (n->decl));

  /* A declaration may carry several symver attributes; handle each.  */
  for (; value != NULL; value = TREE_CHAIN (value))
    {
      /* Starting from binutils 2.35 gas supports:
	  # Assign foo to bar@V1 and baz@V2.
	  .symver foo, bar@V1
	  .symver foo, baz@V2
      */

      /* The attribute argument is a string such as "bar@V1"; intern it
	 as the assembler name of the version alias.  */
      tree symver = get_identifier_with_length
	(TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (value))),
	 TREE_STRING_LENGTH (TREE_VALUE (TREE_VALUE (value))));
      symtab_node *def = symtab_node::get_for_asmname (symver);

      /* Reject situations a versioned symbol cannot handle.  */
      if (def)
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "duplicate definition of a symbol version");
	  inform (DECL_SOURCE_LOCATION (def->decl),
		  "same version was previously defined here");
	  return;
	}
      if (!n->definition)
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "symbol needs to be defined to have a version");
	  return;
	}
      if (DECL_COMMON (n->decl))
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "common symbol cannot be versioned");
	  return;
	}
      if (DECL_COMDAT (n->decl))
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "comdat symbol cannot be versioned");
	  return;
	}
      if (n->weakref)
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "%<weakref%> cannot be versioned");
	  return;
	}
      if (!TREE_PUBLIC (n->decl))
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "versioned symbol must be public");
	  return;
	}
      if (DECL_VISIBILITY (n->decl) != VISIBILITY_DEFAULT)
	{
	  error_at (DECL_SOURCE_LOCATION (n->decl),
		    "versioned symbol must have default visibility");
	  return;
	}

      /* Create new symbol table entry representing the version.  */
      tree new_decl = copy_node (n->decl);

      /* The copy is a pure alias: it has no body or initializer of its
	 own, only the versioned assembler name.  */
      DECL_INITIAL (new_decl) = NULL_TREE;
      if (TREE_CODE (new_decl) == FUNCTION_DECL)
	DECL_STRUCT_FUNCTION (new_decl) = NULL;
      SET_DECL_ASSEMBLER_NAME (new_decl, symver);
      TREE_PUBLIC (new_decl) = 1;
      DECL_ATTRIBUTES (new_decl) = NULL;

      symtab_node *symver_node = symtab_node::get_create (new_decl);
      symver_node->alias = true;
      symver_node->definition = true;
      symver_node->symver = true;
      symver_node->create_reference (n, IPA_REF_ALIAS, NULL);
      symver_node->analyzed = true;
    }
}
799
800 /* Process attributes common for vars and functions. */
801
802 static void
803 process_common_attributes (symtab_node *node, tree decl)
804 {
805 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
806
807 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
808 {
809 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
810 "%<weakref%> attribute should be accompanied with"
811 " an %<alias%> attribute");
812 DECL_WEAK (decl) = 0;
813 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
814 DECL_ATTRIBUTES (decl));
815 }
816
817 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
818 node->no_reorder = 1;
819 process_symver_attribute (node);
820 }
821
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.

   FIRST and FIRST_VAR bound the walk: only functions/variables registered
   after them are processed, so repeated calls handle only new symbols.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* First pass: functions registered since FIRST.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;

      if (node->alias
	  && lookup_attribute ("flatten", DECL_ATTRIBUTES (decl)))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<flatten%> attribute is ignored on aliases");
	}
      /* DECL_PRESERVE_P is set by the "used" attribute.  */
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && node->definition
	  && (!node->alias || DECL_INITIAL (decl) != error_mark_node))
	{
	  /* NODE->DEFINITION && NODE->ALIAS is nonzero for valid weakref
	     function declarations; DECL_INITIAL is non-null for invalid
	     weakref functions that are also defined.  */
	  warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	  DECL_ATTRIBUTES (decl) = remove_attribute ("alias",
						     DECL_ATTRIBUTES (decl));
	  node->alias = false;
	  node->weakref = false;
	  node->transparent_alias = false;
	}
      else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
	       && node->definition
	       && !node->alias)
	warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		    "%<alias%> attribute ignored"
		    " because function is defined");

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "%<always_inline%> function might not be inlinable");

      process_common_attributes (node, decl);
    }
  /* Second pass: variables registered since FIRST_VAR.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external variable with an initializer is in fact a definition;
	 finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
938
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing an already-finalized variable is a no-op.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (!flag_toplevel_reorder)
    node->no_reorder = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (node->no_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  /* What else to do depends on how far compilation has progressed.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (node->no_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();
}
977
/* EDGE is an polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; only enqueue its members the
     first time we see that list.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No possible target at all: redirect to
	       __builtin_unreachable so the call can be removed.  */
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }

	  /* Turn the indirect edge into a direct one and rewrite the
	     call statement accordingly.  */
	  edge = cgraph_edge::make_direct (edge, target);
	  gimple *new_call = cgraph_edge::redirect_call_stmt_to_callee (edge);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file, new_call, 0, TDF_SLIM);
	    }
	}
    }
}
1057
/* Issue appropriate warnings for the global declaration DECL contained in
   symbol table node SNODE.  */

static void
check_global_declaration (symtab_node *snode)
{
  const char *decl_file;
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_PUBLIC (decl))
    {
      if (TREE_NO_WARNING (decl))
	;
      else if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never "
		 "defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       /* For variables: -Wunused-variable covers non-const ones;
	  -Wunused-const-variable covers const ones, where level 2 warns
	  everywhere and level 1 only for decls in the main input file.  */
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || (main_input_filename != NULL
			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
			&& filename_cmp (main_input_filename,
					 decl_file) == 0))))
	   && VAR_P (decl)))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (VAR_P (decl) && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}
1127
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.  */

/* Watermarks into the symbol table: symbols preceding these were already
   handled by an earlier call to analyze_functions (which may run several
   times for intermodule optimization).  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
1132
1133 /* FIRST_TIME is set to TRUE for the first time we are called for a
1134 translation unit from finalize_compilation_unit() or false
1135 otherwise. */
1136
1137 static void
1138 analyze_functions (bool first_time)
1139 {
1140 /* Keep track of already processed nodes when called multiple times for
1141 intermodule optimization. */
1142 cgraph_node *first_handled = first_analyzed;
1143 varpool_node *first_handled_var = first_analyzed_var;
1144 hash_set<void *> reachable_call_targets;
1145
1146 symtab_node *node;
1147 symtab_node *next;
1148 int i;
1149 ipa_ref *ref;
1150 bool changed = true;
1151 location_t saved_loc = input_location;
1152
1153 bitmap_obstack_initialize (NULL);
1154 symtab->state = CONSTRUCTION;
1155 input_location = UNKNOWN_LOCATION;
1156
1157 /* Ugly, but the fixup cannot happen at a time same body alias is created;
1158 C++ FE is confused about the COMDAT groups being right. */
1159 if (symtab->cpp_implicit_aliases_done)
1160 FOR_EACH_SYMBOL (node)
1161 if (node->cpp_implicit_alias)
1162 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1163 build_type_inheritance_graph ();
1164
1165 if (flag_openmp && first_time)
1166 omp_discover_implicit_declare_target ();
1167
1168 /* Analysis adds static variables that in turn adds references to new functions.
1169 So we need to iterate the process until it stabilize. */
1170 while (changed)
1171 {
1172 changed = false;
1173 process_function_and_variable_attributes (first_analyzed,
1174 first_analyzed_var);
1175
1176 /* First identify the trivially needed symbols. */
1177 for (node = symtab->first_symbol ();
1178 node != first_analyzed
1179 && node != first_analyzed_var; node = node->next)
1180 {
1181 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1182 node->get_comdat_group_id ();
1183 if (node->needed_p ())
1184 {
1185 enqueue_node (node);
1186 if (!changed && symtab->dump_file)
1187 fprintf (symtab->dump_file, "Trivially needed symbols:");
1188 changed = true;
1189 if (symtab->dump_file)
1190 fprintf (symtab->dump_file, " %s", node->dump_asm_name ());
1191 if (!changed && symtab->dump_file)
1192 fprintf (symtab->dump_file, "\n");
1193 }
1194 if (node == first_analyzed
1195 || node == first_analyzed_var)
1196 break;
1197 }
1198 symtab->process_new_functions ();
1199 first_analyzed_var = symtab->first_variable ();
1200 first_analyzed = symtab->first_function ();
1201
1202 if (changed && symtab->dump_file)
1203 fprintf (symtab->dump_file, "\n");
1204
1205 /* Lower representation, build callgraph edges and references for all trivially
1206 needed symbols and all symbols referred by them. */
1207 while (queued_nodes != &symtab_terminator)
1208 {
1209 changed = true;
1210 node = queued_nodes;
1211 queued_nodes = (symtab_node *)queued_nodes->aux;
1212 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1213 if (cnode && cnode->definition)
1214 {
1215 cgraph_edge *edge;
1216 tree decl = cnode->decl;
1217
1218 /* ??? It is possible to create extern inline function
1219 and later using weak alias attribute to kill its body.
1220 See gcc.c-torture/compile/20011119-1.c */
1221 if (!DECL_STRUCT_FUNCTION (decl)
1222 && !cnode->alias
1223 && !cnode->thunk.thunk_p
1224 && !cnode->dispatcher_function)
1225 {
1226 cnode->reset ();
1227 cnode->redefined_extern_inline = true;
1228 continue;
1229 }
1230
1231 if (!cnode->analyzed)
1232 cnode->analyze ();
1233
1234 for (edge = cnode->callees; edge; edge = edge->next_callee)
1235 if (edge->callee->definition
1236 && (!DECL_EXTERNAL (edge->callee->decl)
1237 /* When not optimizing, do not try to analyze extern
1238 inline functions. Doing so is pointless. */
1239 || opt_for_fn (edge->callee->decl, optimize)
1240 /* Weakrefs needs to be preserved. */
1241 || edge->callee->alias
1242 /* always_inline functions are inlined even at -O0. */
1243 || lookup_attribute
1244 ("always_inline",
1245 DECL_ATTRIBUTES (edge->callee->decl))
1246 /* Multiversioned functions needs the dispatcher to
1247 be produced locally even for extern functions. */
1248 || edge->callee->function_version ()))
1249 enqueue_node (edge->callee);
1250 if (opt_for_fn (cnode->decl, optimize)
1251 && opt_for_fn (cnode->decl, flag_devirtualize))
1252 {
1253 cgraph_edge *next;
1254
1255 for (edge = cnode->indirect_calls; edge; edge = next)
1256 {
1257 next = edge->next_callee;
1258 if (edge->indirect_info->polymorphic)
1259 walk_polymorphic_call_targets (&reachable_call_targets,
1260 edge);
1261 }
1262 }
1263
1264 /* If decl is a clone of an abstract function,
1265 mark that abstract function so that we don't release its body.
1266 The DECL_INITIAL() of that abstract function declaration
1267 will be later needed to output debug info. */
1268 if (DECL_ABSTRACT_ORIGIN (decl))
1269 {
1270 cgraph_node *origin_node
1271 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1272 origin_node->used_as_abstract_origin = true;
1273 }
1274 /* Preserve a functions function context node. It will
1275 later be needed to output debug info. */
1276 if (tree fn = decl_function_context (decl))
1277 {
1278 cgraph_node *origin_node = cgraph_node::get_create (fn);
1279 enqueue_node (origin_node);
1280 }
1281 }
1282 else
1283 {
1284 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1285 if (vnode && vnode->definition && !vnode->analyzed)
1286 vnode->analyze ();
1287 }
1288
1289 if (node->same_comdat_group)
1290 {
1291 symtab_node *next;
1292 for (next = node->same_comdat_group;
1293 next != node;
1294 next = next->same_comdat_group)
1295 if (!next->comdat_local_p ())
1296 enqueue_node (next);
1297 }
1298 for (i = 0; node->iterate_reference (i, ref); i++)
1299 if (ref->referred->definition
1300 && (!DECL_EXTERNAL (ref->referred->decl)
1301 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1302 && optimize)
1303 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1304 && opt_for_fn (ref->referred->decl, optimize))
1305 || node->alias
1306 || ref->referred->alias)))
1307 enqueue_node (ref->referred);
1308 symtab->process_new_functions ();
1309 }
1310 }
1311 update_type_inheritance_graph ();
1312
1313 /* Collect entry points to the unit. */
1314 if (symtab->dump_file)
1315 {
1316 fprintf (symtab->dump_file, "\n\nInitial ");
1317 symtab->dump (symtab->dump_file);
1318 }
1319
1320 if (first_time)
1321 {
1322 symtab_node *snode;
1323 FOR_EACH_SYMBOL (snode)
1324 check_global_declaration (snode);
1325 }
1326
1327 if (symtab->dump_file)
1328 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1329
1330 for (node = symtab->first_symbol ();
1331 node != first_handled
1332 && node != first_handled_var; node = next)
1333 {
1334 next = node->next;
1335 /* For symbols declared locally we clear TREE_READONLY when emitting
1336 the constructor (if one is needed). For external declarations we can
1337 not safely assume that the type is readonly because we may be called
1338 during its construction. */
1339 if (TREE_CODE (node->decl) == VAR_DECL
1340 && TYPE_P (TREE_TYPE (node->decl))
1341 && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
1342 && DECL_EXTERNAL (node->decl))
1343 TREE_READONLY (node->decl) = 0;
1344 if (!node->aux && !node->referred_to_p ())
1345 {
1346 if (symtab->dump_file)
1347 fprintf (symtab->dump_file, " %s", node->dump_name ());
1348
1349 /* See if the debugger can use anything before the DECL
1350 passes away. Perhaps it can notice a DECL that is now a
1351 constant and can tag the early DIE with an appropriate
1352 attribute.
1353
1354 Otherwise, this is the last chance the debug_hooks have
1355 at looking at optimized away DECLs, since
1356 late_global_decl will subsequently be called from the
1357 contents of the now pruned symbol table. */
1358 if (VAR_P (node->decl)
1359 && !decl_function_context (node->decl))
1360 {
1361 /* We are reclaiming totally unreachable code and variables
1362 so they effectively appear as readonly. Show that to
1363 the debug machinery. */
1364 TREE_READONLY (node->decl) = 1;
1365 node->definition = false;
1366 (*debug_hooks->late_global_decl) (node->decl);
1367 }
1368
1369 node->remove ();
1370 continue;
1371 }
1372 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1373 {
1374 tree decl = node->decl;
1375
1376 if (cnode->definition && !gimple_has_body_p (decl)
1377 && !cnode->alias
1378 && !cnode->thunk.thunk_p)
1379 cnode->reset ();
1380
1381 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1382 || cnode->alias
1383 || gimple_has_body_p (decl)
1384 || cnode->native_rtl_p ());
1385 gcc_assert (cnode->analyzed == cnode->definition);
1386 }
1387 node->aux = NULL;
1388 }
1389 for (;node; node = node->next)
1390 node->aux = NULL;
1391 first_analyzed = symtab->first_function ();
1392 first_analyzed_var = symtab->first_variable ();
1393 if (symtab->dump_file)
1394 {
1395 fprintf (symtab->dump_file, "\n\nReclaimed ");
1396 symtab->dump (symtab->dump_file);
1397 }
1398 bitmap_obstack_release (NULL);
1399 ggc_collect ();
1400 /* Initialize assembler name hash, in particular we want to trigger C++
1401 mangling and same body alias creation before we free DECL_ARGUMENTS
1402 used by it. */
1403 if (!seen_error ())
1404 symtab->symtab_initialize_asm_name_hash ();
1405
1406 input_location = saved_loc;
1407 }
1408
/* Check declaration of the type of ALIAS for compatibility with its TARGET
   (which may be an ifunc resolver) and issue a diagnostic when they are
   not compatible according to language rules (plus a C++ extension for
   non-static member functions).  */

static void
maybe_diag_incompatible_alias (tree alias, tree target)
{
  tree altype = TREE_TYPE (alias);
  tree targtype = TREE_TYPE (target);

  bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
  /* Function type whose pointer the resolver is expected to return;
     only meaningful when IFUNC.  */
  tree funcptr = altype;

  if (ifunc)
    {
      /* Handle attribute ifunc first.  */
      if (TREE_CODE (altype) == METHOD_TYPE)
	{
	  /* Set FUNCPTR to the type of the alias target.  If the type
	     is a non-static member function of class C, construct a type
	     of an ordinary function taking C* as the first argument,
	     followed by the member function argument list, and use it
	     instead to check for incompatibility.  This conversion is
	     not defined by the language but an extension provided by
	     G++.  */

	  tree rettype = TREE_TYPE (altype);
	  tree args = TYPE_ARG_TYPES (altype);
	  altype = build_function_type (rettype, args);
	  funcptr = altype;
	}

      /* Strip the resolver's return type down to the pointed-to
	 function type.  */
      targtype = TREE_TYPE (targtype);

      if (POINTER_TYPE_P (targtype))
	{
	  targtype = TREE_TYPE (targtype);

	  /* Only issue Wattribute-alias for conversions to void* with
	     -Wextra.  */
	  if (VOID_TYPE_P (targtype) && !extra_warnings)
	    return;

	  /* Proceed to handle incompatible ifunc resolvers below.  */
	}
      else
	{
	  /* The resolver does not return a pointer at all: hard error.  */
	  funcptr = build_pointer_type (funcptr);

	  error_at (DECL_SOURCE_LOCATION (target),
		    "%<ifunc%> resolver for %qD must return %qT",
		    alias, funcptr);
	  inform (DECL_SOURCE_LOCATION (alias),
		  "resolver indirect function declared here");
	  return;
	}
    }

  if ((!FUNC_OR_METHOD_TYPE_P (targtype)
       || (prototype_p (altype)
	   && prototype_p (targtype)
	   && !types_compatible_p (altype, targtype))))
    {
      /* Warn for incompatibilities.  Avoid warning for functions
	 without a prototype to make it possible to declare aliases
	 without knowing the exact type, as libstdc++ does.  */
      if (ifunc)
	{
	  funcptr = build_pointer_type (funcptr);

	  auto_diagnostic_group d;
	  if (warning_at (DECL_SOURCE_LOCATION (target),
			  OPT_Wattribute_alias_,
			  "%<ifunc%> resolver for %qD should return %qT",
			  alias, funcptr))
	    inform (DECL_SOURCE_LOCATION (alias),
		    "resolver indirect function declared here");
	}
      else
	{
	  auto_diagnostic_group d;
	  if (warning_at (DECL_SOURCE_LOCATION (alias),
			  OPT_Wattribute_alias_,
			  "%qD alias between functions of incompatible "
			  "types %qT and %qT", alias, altype, targtype))
	    inform (DECL_SOURCE_LOCATION (target),
		    "aliased declaration here");
	}
    }
}
1500
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Note: I is never incremented.  Every path through the body removes
     element I with unordered_remove, so the next pair slides into slot I.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	      node->transparent_alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      /* A non-weakref alias to a symbol not defined anywhere in the unit
	 is an error.  */
      else if (!target_node)
	{
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Function-to-function alias.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  maybe_diag_incompatible_alias (p->decl, target_node->decl);

	  maybe_diag_alias_attributes (p->decl, target_node->decl);

	  /* An alias cannot also carry its own body; drop any previous
	     definition of the alias' decl.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Variable-to-variable alias.  */
      else if (VAR_P (p->decl)
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Mixed function/variable aliases are not supported.  */
      else
	{
	  error ("%q+D alias between function and variable is not supported",
		 p->decl);
	  inform (DECL_SOURCE_LOCATION (target_node->decl),
		  "aliased declaration here");

	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1585
1586
/* Figure out what functions we want to assemble: set NODE->process on every
   function that must be emitted, and (with checking enabled) verify that
   everything else was already reclaimed as unneeded.  */

static void
mark_functions_to_output (void)
{
  bool check_same_comdat_groups = false;
  cgraph_node *node;

  /* No node may be marked for processing before we start.  */
  if (flag_checking)
    FOR_EACH_FUNCTION (node)
      gcc_assert (!node->process);

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* Only a COMDAT-group sweep (below in this loop) may have marked
	 NODE already.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emit the whole COMDAT group together.  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Defer verification of COMDAT members to the second pass below,
	     since a later member may still mark this node.  */
	  if (flag_checking)
	    check_same_comdat_groups = true;
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
	  if (flag_checking
	      && !node->inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
	  gcc_assert (node->inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
  /* Second pass: a COMDAT member that nobody marked must not still own a
     reclaimable body.  */
  if (flag_checking && check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
}
1682
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.
   COUNT is the profile count applied to the entry/exit blocks and the body.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.  */

basic_block
init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);

  if (in_ssa)
    {
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;

  /* There is no front-end tree body for this function.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties normally produced by lowering so later passes
     treat the function as already lowered.  */
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  /* ENTRY -> BB -> EXIT; both edges are always taken.  */
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->probability = profile_probability::always ();
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->probability = profile_probability::always ();
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1732
/* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
   VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
   it is non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  Statements are emitted after BSI.
   Returns an SSA-ready temporary holding the adjusted pointer.  */

tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset,
	      HOST_WIDE_INT indirect_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied first,
     before the vtable lookup.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* Lazily build the global vtable-entry pointer type used for the
     vptr loads below.  */
  if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
    {
      tree vfunc_type = make_node (FUNCTION_TYPE);
      TREE_TYPE (vfunc_type) = integer_type_node;
      TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
      layout_type (vfunc_type);

      vtable_entry_type = build_pointer_type (vfunc_type);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* Likewise for an offset that is stored in the object that contains the
     vtable.  */
  if (indirect_offset != 0)
    {
      tree offset_ptr, offset_tree;

      /* Get the address of the offset.  */
      offset_ptr
	= create_tmp_reg (build_pointer_type
			    (build_pointer_type (vtable_entry_type)),
			  "offset_ptr");
      stmt = gimple_build_assign (offset_ptr,
				  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      stmt = gimple_build_assign
		(offset_ptr,
		 fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
						  indirect_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
				    "offset");
      stmt = gimple_build_assign (offset_tree,
				  build_simple_mem_ref (offset_ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk the constant offset is applied last,
     after the vtable/indirect adjustments.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1872
1873 /* Expand thunk NODE to gimple if possible.
1874 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1875 no assembler is produced.
1876 When OUTPUT_ASM_THUNK is true, also produce assembler for
1877 thunks that are not lowered. */
1878
1879 bool
1880 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1881 {
1882 bool this_adjusting = thunk.this_adjusting;
1883 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1884 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1885 HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1886 tree virtual_offset = NULL;
1887 tree alias = callees->callee->decl;
1888 tree thunk_fndecl = decl;
1889 tree a;
1890
1891 if (!force_gimple_thunk
1892 && this_adjusting
1893 && indirect_offset == 0
1894 && !DECL_EXTERNAL (alias)
1895 && !DECL_STATIC_CHAIN (alias)
1896 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1897 virtual_value, alias))
1898 {
1899 tree fn_block;
1900 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1901
1902 if (!output_asm_thunks)
1903 {
1904 analyzed = true;
1905 return false;
1906 }
1907
1908 if (in_lto_p)
1909 get_untransformed_body ();
1910 a = DECL_ARGUMENTS (thunk_fndecl);
1911
1912 current_function_decl = thunk_fndecl;
1913
1914 /* Ensure thunks are emitted in their correct sections. */
1915 resolve_unique_section (thunk_fndecl, 0,
1916 flag_function_sections);
1917
1918 DECL_RESULT (thunk_fndecl)
1919 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1920 RESULT_DECL, 0, restype);
1921 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1922
1923 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1924 create one. */
1925 fn_block = make_node (BLOCK);
1926 BLOCK_VARS (fn_block) = a;
1927 DECL_INITIAL (thunk_fndecl) = fn_block;
1928 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1929 allocate_struct_function (thunk_fndecl, false);
1930 init_function_start (thunk_fndecl);
1931 cfun->is_thunk = 1;
1932 insn_locations_init ();
1933 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1934 prologue_location = curr_insn_location ();
1935
1936 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1937 fixed_offset, virtual_value, alias);
1938
1939 insn_locations_finalize ();
1940 init_insn_lengths ();
1941 free_after_compilation (cfun);
1942 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1943 thunk.thunk_p = false;
1944 analyzed = false;
1945 }
1946 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1947 {
1948 error ("generic thunk code fails for method %qD which uses %<...%>",
1949 thunk_fndecl);
1950 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1951 analyzed = true;
1952 return false;
1953 }
1954 else
1955 {
1956 tree restype;
1957 basic_block bb, then_bb, else_bb, return_bb;
1958 gimple_stmt_iterator bsi;
1959 int nargs = 0;
1960 tree arg;
1961 int i;
1962 tree resdecl;
1963 tree restmp = NULL;
1964
1965 gcall *call;
1966 greturn *ret;
1967 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1968
1969 /* We may be called from expand_thunk that releases body except for
1970 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1971 if (in_lto_p && !force_gimple_thunk)
1972 get_untransformed_body ();
1973
1974 /* We need to force DECL_IGNORED_P when the thunk is created
1975 after early debug was run. */
1976 if (force_gimple_thunk)
1977 DECL_IGNORED_P (thunk_fndecl) = 1;
1978
1979 a = DECL_ARGUMENTS (thunk_fndecl);
1980
1981 current_function_decl = thunk_fndecl;
1982
1983 /* Ensure thunks are emitted in their correct sections. */
1984 resolve_unique_section (thunk_fndecl, 0,
1985 flag_function_sections);
1986
1987 bitmap_obstack_initialize (NULL);
1988
1989 if (thunk.virtual_offset_p)
1990 virtual_offset = size_int (virtual_value);
1991
1992 /* Build the return declaration for the function. */
1993 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1994 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1995 {
1996 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1997 DECL_ARTIFICIAL (resdecl) = 1;
1998 DECL_IGNORED_P (resdecl) = 1;
1999 DECL_CONTEXT (resdecl) = thunk_fndecl;
2000 DECL_RESULT (thunk_fndecl) = resdecl;
2001 }
2002 else
2003 resdecl = DECL_RESULT (thunk_fndecl);
2004
2005 profile_count cfg_count = count;
2006 if (!cfg_count.initialized_p ())
2007 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
2008
2009 bb = then_bb = else_bb = return_bb
2010 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
2011
2012 bsi = gsi_start_bb (bb);
2013
2014 /* Build call to the function being thunked. */
2015 if (!VOID_TYPE_P (restype)
2016 && (!alias_is_noreturn
2017 || TREE_ADDRESSABLE (restype)
2018 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
2019 {
2020 if (DECL_BY_REFERENCE (resdecl))
2021 {
2022 restmp = gimple_fold_indirect_ref (resdecl);
2023 if (!restmp)
2024 restmp = build2 (MEM_REF,
2025 TREE_TYPE (TREE_TYPE (resdecl)),
2026 resdecl,
2027 build_int_cst (TREE_TYPE (resdecl), 0));
2028 }
2029 else if (!is_gimple_reg_type (restype))
2030 {
2031 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
2032 {
2033 restmp = resdecl;
2034
2035 if (VAR_P (restmp))
2036 {
2037 add_local_decl (cfun, restmp);
2038 BLOCK_VARS (DECL_INITIAL (current_function_decl))
2039 = restmp;
2040 }
2041 }
2042 else
2043 restmp = create_tmp_var (restype, "retval");
2044 }
2045 else
2046 restmp = create_tmp_reg (restype, "retval");
2047 }
2048
2049 for (arg = a; arg; arg = DECL_CHAIN (arg))
2050 nargs++;
2051 auto_vec<tree> vargs (nargs);
2052 i = 0;
2053 arg = a;
2054 if (this_adjusting)
2055 {
2056 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
2057 virtual_offset, indirect_offset));
2058 arg = DECL_CHAIN (a);
2059 i = 1;
2060 }
2061
2062 if (nargs)
2063 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
2064 {
2065 tree tmp = arg;
2066 DECL_NOT_GIMPLE_REG_P (arg) = 0;
2067 if (!is_gimple_val (arg))
2068 {
2069 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
2070 (TREE_TYPE (arg)), "arg");
2071 gimple *stmt = gimple_build_assign (tmp, arg);
2072 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2073 }
2074 vargs.quick_push (tmp);
2075 }
2076 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
2077 callees->call_stmt = call;
2078 gimple_call_set_from_thunk (call, true);
2079 if (DECL_STATIC_CHAIN (alias))
2080 {
2081 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
2082 tree type = TREE_TYPE (p);
2083 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
2084 PARM_DECL, create_tmp_var_name ("CHAIN"),
2085 type);
2086 DECL_ARTIFICIAL (decl) = 1;
2087 DECL_IGNORED_P (decl) = 1;
2088 TREE_USED (decl) = 1;
2089 DECL_CONTEXT (decl) = thunk_fndecl;
2090 DECL_ARG_TYPE (decl) = type;
2091 TREE_READONLY (decl) = 1;
2092
2093 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
2094 sf->static_chain_decl = decl;
2095
2096 gimple_call_set_chain (call, decl);
2097 }
2098
2099 /* Return slot optimization is always possible and in fact required to
2100 return values with DECL_BY_REFERENCE. */
2101 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2102 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2103 || DECL_BY_REFERENCE (resdecl)))
2104 gimple_call_set_return_slot_opt (call, true);
2105
2106 if (restmp)
2107 {
2108 gimple_call_set_lhs (call, restmp);
2109 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2110 TREE_TYPE (TREE_TYPE (alias))));
2111 }
2112 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2113 if (!alias_is_noreturn)
2114 {
2115 if (restmp && !this_adjusting
2116 && (fixed_offset || virtual_offset))
2117 {
2118 tree true_label = NULL_TREE;
2119
2120 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2121 {
2122 gimple *stmt;
2123 edge e;
2124 /* If the return type is a pointer, we need to
2125 protect against NULL. We know there will be an
2126 adjustment, because that's why we're emitting a
2127 thunk. */
2128 then_bb = create_basic_block (NULL, bb);
2129 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2130 return_bb = create_basic_block (NULL, then_bb);
2131 return_bb->count = cfg_count;
2132 else_bb = create_basic_block (NULL, else_bb);
2133 else_bb->count = cfg_count.apply_scale (1, 16);
2134 add_bb_to_loop (then_bb, bb->loop_father);
2135 add_bb_to_loop (return_bb, bb->loop_father);
2136 add_bb_to_loop (else_bb, bb->loop_father);
2137 remove_edge (single_succ_edge (bb));
2138 true_label = gimple_block_label (then_bb);
2139 stmt = gimple_build_cond (NE_EXPR, restmp,
2140 build_zero_cst (TREE_TYPE (restmp)),
2141 NULL_TREE, NULL_TREE);
2142 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2143 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2144 e->probability = profile_probability::guessed_always ()
2145 .apply_scale (1, 16);
2146 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2147 e->probability = profile_probability::guessed_always ()
2148 .apply_scale (1, 16);
2149 make_single_succ_edge (return_bb,
2150 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2151 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2152 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2153 e->probability = profile_probability::always ();
2154 bsi = gsi_last_bb (then_bb);
2155 }
2156
2157 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2158 fixed_offset, virtual_offset,
2159 indirect_offset);
2160 if (true_label)
2161 {
2162 gimple *stmt;
2163 bsi = gsi_last_bb (else_bb);
2164 stmt = gimple_build_assign (restmp,
2165 build_zero_cst (TREE_TYPE (restmp)));
2166 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2167 bsi = gsi_last_bb (return_bb);
2168 }
2169 }
2170 else
2171 gimple_call_set_tail (call, true);
2172
2173 /* Build return value. */
2174 if (!DECL_BY_REFERENCE (resdecl))
2175 ret = gimple_build_return (restmp);
2176 else
2177 ret = gimple_build_return (resdecl);
2178
2179 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2180 }
2181 else
2182 {
2183 gimple_call_set_tail (call, true);
2184 remove_edge (single_succ_edge (bb));
2185 }
2186
2187 cfun->gimple_df->in_ssa_p = true;
2188 update_max_bb_count ();
2189 profile_status_for_fn (cfun)
2190 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2191 ? PROFILE_READ : PROFILE_GUESSED;
2192 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2193 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2194 delete_unreachable_blocks ();
2195 update_ssa (TODO_update_ssa);
2196 checking_verify_flow_info ();
2197 free_dominance_info (CDI_DOMINATORS);
2198
2199 /* Since we want to emit the thunk, we explicitly mark its name as
2200 referenced. */
2201 thunk.thunk_p = false;
2202 lowered = true;
2203 bitmap_obstack_release (NULL);
2204 }
2205 current_function_decl = NULL;
2206 set_cfun (NULL);
2207 return true;
2208 }
2209
/* Assemble thunks and aliases associated to node.  */

void
cgraph_node::assemble_thunks_and_aliases (void)
{
  cgraph_edge *e;
  ipa_ref *ref;

  /* Emit every thunk whose target is this node.  E is advanced to the
     next caller BEFORE expanding, because expand_thunk releases the
     thunk's body, which removes the edge we are standing on.  */
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p
	&& !e->caller->inlined_to)
      {
	cgraph_node *thunk = e->caller;

	e = e->next_caller;
	thunk->expand_thunk (true, false);
	/* A thunk may itself have thunks and aliases; emit those too.  */
	thunk->assemble_thunks_and_aliases ();
      }
    else
      e = e->next_caller;

  FOR_EACH_ALIAS (this, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if (!alias->transparent_alias)
	{
	  bool saved_written = TREE_ASM_WRITTEN (decl);

	  /* Force assemble_alias to really output the alias this time instead
	     of buffering it in same alias pairs.  TREE_ASM_WRITTEN is
	     restored below so the trick is invisible to later passes.  */
	  TREE_ASM_WRITTEN (decl) = 1;
	  if (alias->symver)
	    do_assemble_symver (alias->decl,
				DECL_ASSEMBLER_NAME (decl));
	  else
	    do_assemble_alias (alias->decl,
			       DECL_ASSEMBLER_NAME (decl));
	  /* Recurse: thunks and aliases of the alias must also be emitted.  */
	  alias->assemble_thunks_and_aliases ();
	  TREE_ASM_WRITTEN (decl) = saved_written;
	}
    }
}
2252
/* Expand function specified by node.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!inlined_to);

  /* __RTL functions are compiled as soon as they are parsed, so don't
     do it again.  */
  if (native_rtl_p ())
    return;

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  /* Materialize the body (e.g. read it back from an LTO stream).  */
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  gcc_assert (DECL_STRUCT_FUNCTION (decl));
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* IPA transforms were queued at IPA time; apply them to this body now
     that it exists, after refreshing virtual SSA form.  */
  update_ssa (TODO_update_ssa_only_virtuals);
  execute_all_ipa_transforms (false);

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
			       warn_larger_than_size) > 0)
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size when it fits in an unsigned int;
	     otherwise only report that it exceeds the limit.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_,
		     "size of return value of %q+D is %u bytes",
		     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_,
		     "size of return value of %q+D is larger than %wu bytes",
		     decl, warn_larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  if (cfun)
    pop_cfun ();

  /* It would make a lot more sense to output thunks before function body to
     get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
2375
2376 /* Node comparator that is responsible for the order that corresponds
2377 to time when a function was launched for the first time. */
2378
2379 int
2380 tp_first_run_node_cmp (const void *pa, const void *pb)
2381 {
2382 const cgraph_node *a = *(const cgraph_node * const *) pa;
2383 const cgraph_node *b = *(const cgraph_node * const *) pb;
2384 unsigned int tp_first_run_a = a->tp_first_run;
2385 unsigned int tp_first_run_b = b->tp_first_run;
2386
2387 if (!opt_for_fn (a->decl, flag_profile_reorder_functions)
2388 || a->no_reorder)
2389 tp_first_run_a = 0;
2390 if (!opt_for_fn (b->decl, flag_profile_reorder_functions)
2391 || b->no_reorder)
2392 tp_first_run_b = 0;
2393
2394 if (tp_first_run_a == tp_first_run_b)
2395 return a->order - b->order;
2396
2397 /* Functions with time profile must be before these without profile. */
2398 tp_first_run_a = (tp_first_run_a - 1) & INT_MAX;
2399 tp_first_run_b = (tp_first_run_b - 1) & INT_MAX;
2400
2401 return tp_first_run_a - tp_first_run_b;
2402 }
2403
2404 /* Expand all functions that must be output.
2405
2406 Attempt to topologically sort the nodes so function is output when
2407 all called functions are already assembled to allow data to be
2408 propagated across the callgraph. Use a stack to get smaller distance
2409 between a function and its callees (later we may choose to use a more
2410 sophisticated algorithm for function reordering; we will likely want
2411 to use subsections to make the output functions appear in top-down
2412 order). */
2413
2414 static void
2415 expand_all_functions (void)
2416 {
2417 cgraph_node *node;
2418 cgraph_node **order = XCNEWVEC (cgraph_node *,
2419 symtab->cgraph_count);
2420 cgraph_node **tp_first_run_order = XCNEWVEC (cgraph_node *,
2421 symtab->cgraph_count);
2422 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2423 int order_pos, tp_first_run_order_pos = 0, new_order_pos = 0;
2424 int i;
2425
2426 order_pos = ipa_reverse_postorder (order);
2427 gcc_assert (order_pos == symtab->cgraph_count);
2428
2429 /* Garbage collector may remove inline clones we eliminate during
2430 optimization. So we must be sure to not reference them. */
2431 for (i = 0; i < order_pos; i++)
2432 if (order[i]->process)
2433 {
2434 if (order[i]->tp_first_run
2435 && opt_for_fn (order[i]->decl, flag_profile_reorder_functions))
2436 tp_first_run_order[tp_first_run_order_pos++] = order[i];
2437 else
2438 order[new_order_pos++] = order[i];
2439 }
2440
2441 /* First output functions with time profile in specified order. */
2442 qsort (tp_first_run_order, tp_first_run_order_pos,
2443 sizeof (cgraph_node *), tp_first_run_node_cmp);
2444 for (i = 0; i < tp_first_run_order_pos; i++)
2445 {
2446 node = tp_first_run_order[i];
2447
2448 if (node->process)
2449 {
2450 expanded_func_count++;
2451 profiled_func_count++;
2452
2453 if (symtab->dump_file)
2454 fprintf (symtab->dump_file,
2455 "Time profile order in expand_all_functions:%s:%d\n",
2456 node->dump_asm_name (), node->tp_first_run);
2457 node->process = 0;
2458 node->expand ();
2459 }
2460 }
2461
2462 /* Output functions in RPO so callees get optimized before callers. This
2463 makes ipa-ra and other propagators to work.
2464 FIXME: This is far from optimal code layout. */
2465 for (i = new_order_pos - 1; i >= 0; i--)
2466 {
2467 node = order[i];
2468
2469 if (node->process)
2470 {
2471 expanded_func_count++;
2472 node->process = 0;
2473 node->expand ();
2474 }
2475 }
2476
2477 if (dump_file)
2478 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2479 main_input_filename, profiled_func_count, expanded_func_count);
2480
2481 if (symtab->dump_file && tp_first_run_order_pos)
2482 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2483 profiled_func_count, expanded_func_count);
2484
2485 symtab->process_new_functions ();
2486 free_gimplify_stack ();
2487 delete ipa_saved_clone_sources;
2488 ipa_saved_clone_sources = NULL;
2489 free (order);
2490 }
2491
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_FUNCTION,	/* Function to be expanded (cgraph_node).  */
  ORDER_VAR,		/* Defined variable to be assembled (varpool_node).  */
  ORDER_VAR_UNDEF,	/* Undefined variable to be declared (varpool_node).  */
  ORDER_ASM		/* Toplevel asm statement (asm_node).  */
};
2501
/* Pairs a symtab entity with its source-order number so that
   output_in_order can emit symbols in their original file order.  */

struct cgraph_order_sort
{
  /* Construct from a cgraph_node.  */
  cgraph_order_sort (cgraph_node *node)
  : kind (ORDER_FUNCTION), order (node->order)
  {
    u.f = node;
  }

  /* Construct from a varpool_node.  Defined variables get ORDER_VAR,
     undefined ones ORDER_VAR_UNDEF.  */
  cgraph_order_sort (varpool_node *node)
  : kind (node->definition ? ORDER_VAR : ORDER_VAR_UNDEF), order (node->order)
  {
    u.v = node;
  }

  /* Construct from a asm_node.  */
  cgraph_order_sort (asm_node *node)
  : kind (ORDER_ASM), order (node->order)
  {
    u.a = node;
  }

  /* Assembly cgraph_order_sort based on its type.  */
  void process ();

  /* Discriminator: selects the active member of U.  */
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;
    varpool_node *v;
    asm_node *a;
  } u;
  /* Source order number; the sort key used by cgraph_order_cmp.  */
  int order;
};
2537
2538 /* Assembly cgraph_order_sort based on its type. */
2539
2540 void
2541 cgraph_order_sort::process ()
2542 {
2543 switch (kind)
2544 {
2545 case ORDER_FUNCTION:
2546 u.f->process = 0;
2547 u.f->expand ();
2548 break;
2549 case ORDER_VAR:
2550 u.v->assemble_decl ();
2551 break;
2552 case ORDER_VAR_UNDEF:
2553 assemble_undefined_decl (u.v->decl);
2554 break;
2555 case ORDER_ASM:
2556 assemble_asm (u.a->asm_str);
2557 break;
2558 default:
2559 gcc_unreachable ();
2560 }
2561 }
2562
2563 /* Compare cgraph_order_sort by order. */
2564
2565 static int
2566 cgraph_order_cmp (const void *a_p, const void *b_p)
2567 {
2568 const cgraph_order_sort *nodea = (const cgraph_order_sort *)a_p;
2569 const cgraph_order_sort *nodeb = (const cgraph_order_sort *)b_p;
2570
2571 return nodea->order - nodeb->order;
2572 }
2573
2574 /* Output all functions, variables, and asm statements in the order
2575 according to their order fields, which is the order in which they
2576 appeared in the file. This implements -fno-toplevel-reorder. In
2577 this mode we may output functions and variables which don't really
2578 need to be output. */
2579
2580 static void
2581 output_in_order (void)
2582 {
2583 int i;
2584 cgraph_node *cnode;
2585 varpool_node *vnode;
2586 asm_node *anode;
2587 auto_vec<cgraph_order_sort> nodes;
2588 cgraph_order_sort *node;
2589
2590 FOR_EACH_DEFINED_FUNCTION (cnode)
2591 if (cnode->process && !cnode->thunk.thunk_p
2592 && !cnode->alias && cnode->no_reorder)
2593 nodes.safe_push (cgraph_order_sort (cnode));
2594
2595 /* There is a similar loop in symbol_table::output_variables.
2596 Please keep them in sync. */
2597 FOR_EACH_VARIABLE (vnode)
2598 if (vnode->no_reorder
2599 && !DECL_HARD_REGISTER (vnode->decl)
2600 && !DECL_HAS_VALUE_EXPR_P (vnode->decl))
2601 nodes.safe_push (cgraph_order_sort (vnode));
2602
2603 for (anode = symtab->first_asm_symbol (); anode; anode = anode->next)
2604 nodes.safe_push (cgraph_order_sort (anode));
2605
2606 /* Sort nodes by order. */
2607 nodes.qsort (cgraph_order_cmp);
2608
2609 /* In toplevel reorder mode we output all statics; mark them as needed. */
2610 FOR_EACH_VEC_ELT (nodes, i, node)
2611 if (node->kind == ORDER_VAR)
2612 node->u.v->finalize_named_section_flags ();
2613
2614 FOR_EACH_VEC_ELT (nodes, i, node)
2615 node->process ();
2616
2617 symtab->clear_asm_symbols ();
2618 }
2619
/* Run the inter-procedural optimization passes and, when LTO or
   offloading is enabled, stream out the IPA summaries.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes work on the whole program, not on one function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips the PLUGIN_ALL_IPA_PASSES_END
	 callback and bitmap_obstack_release below — verify this is
	 intentional on the error path.  */
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p
      || flag_incremental_link == INCREMENTAL_LINK_LTO)
    {
      if (!quiet_flag)
	fprintf (stderr, "Streaming LTO\n");
      /* Offload summaries go into their own sections, written before
	 the regular LTO summaries.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes now unless they belong to a different
     phase of this LTO compilation.  */
  if (!flag_ltrans
      && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
	  || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2697
2698
2699 /* Return string alias is alias of. */
2700
2701 static tree
2702 get_alias_symbol (tree decl)
2703 {
2704 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2705 return get_identifier (TREE_STRING_POINTER
2706 (TREE_VALUE (TREE_VALUE (alias))));
2707 }
2708
2709
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* Neither source of a target is available; this should have
	       been caught earlier.  The call after gcc_unreachable is
	       dead code, apparently kept to document the old fallback.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2743
/* Perform simple optimizations based on callgraph.  Drives the whole
   middle end: IPA passes, LTO streaming, function expansion, and
   final consistency checks.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

  symtab_node::checking_verify_symtab_nodes ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    dump_memory_report ("Memory consumption before IPA");
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    {
      timevar_start (TV_CGRAPH_IPA_PASSES);
      ipa_passes ();
      timevar_stop (TV_CGRAPH_IPA_PASSES);
    }
  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
	  && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab->dump (dump_file);
    }
  if (post_ipa_mem_report)
    dump_memory_report ("Memory consumption after IPA");
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  switch_to_section (text_section);
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
  symtab_node::checking_verify_symtab_nodes ();

  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	  (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	  = (node->alias_target ? node->alias_target
	     : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  /* Output first asm statements and anything ordered.  The process
     flag is cleared for these nodes, so we skip them later.  */
  output_in_order ();

  timevar_start (TV_CGRAPH_FUNC_EXPANSION);
  expand_all_functions ();
  timevar_stop (TV_CGRAPH_FUNC_EXPANSION);

  output_variables ();

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab->dump (dump_file);
    }
  /* Everything below is expensive verification, done only with
     checking enabled.  */
  if (!flag_checking)
    return;
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
}
2871
/* State for the debug and earlydebug dump streams.  Dump numbers are
   assigned in debuginfo_early_init; files and flags are filled in by
   debuginfo_init and closed by debuginfo_fini.  */

/* Earlydebug dump file, flags, and number.  */

static int debuginfo_early_dump_nr;
static FILE *debuginfo_early_dump_file;
static dump_flags_t debuginfo_early_dump_flags;

/* Debug dump file, flags, and number.  */

static int debuginfo_dump_nr;
static FILE *debuginfo_dump_file;
static dump_flags_t debuginfo_dump_flags;
2883
2884 /* Register the debug and earlydebug dump files. */
2885
2886 void
2887 debuginfo_early_init (void)
2888 {
2889 gcc::dump_manager *dumps = g->get_dumps ();
2890 debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2891 "earlydebug", DK_tree,
2892 OPTGROUP_NONE,
2893 false);
2894 debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2895 "debug", DK_tree,
2896 OPTGROUP_NONE,
2897 false);
2898 }
2899
2900 /* Initialize the debug and earlydebug dump files. */
2901
2902 void
2903 debuginfo_init (void)
2904 {
2905 gcc::dump_manager *dumps = g->get_dumps ();
2906 debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2907 debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2908 debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2909 debuginfo_early_dump_flags
2910 = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2911 }
2912
2913 /* Finalize the debug and earlydebug dump files. */
2914
2915 void
2916 debuginfo_fini (void)
2917 {
2918 if (debuginfo_dump_file)
2919 dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2920 if (debuginfo_early_dump_file)
2921 dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2922 }
2923
/* Set dump_file to the debug dump file.  Paired with debuginfo_stop,
   which resets the stream.  */

void
debuginfo_start (void)
{
  set_dump_file (debuginfo_dump_file);
}
2931
/* Undo setting dump_file to the debug dump file.  */

void
debuginfo_stop (void)
{
  set_dump_file (NULL);
}
2939
/* Set dump_file to the earlydebug dump file.  Paired with
   debuginfo_early_stop, which resets the stream.  */

void
debuginfo_early_start (void)
{
  set_dump_file (debuginfo_early_dump_file);
}
2947
/* Undo setting dump_file to the earlydebug dump file.  */

void
debuginfo_early_stop (void)
{
  set_dump_file (NULL);
}
2955
/* Analyze the whole compilation unit once it is parsed completely.
   Called by the frontend when the source-level translation unit is
   final; lowers functions, emits early debug, then hands off to
   symbol_table::compile.  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions (/*first_time=*/true);

  /* Mark alias targets necessary and emit diagnostics (second
     invocation, after the main analysis).  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions (/*first_time=*/false);

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  if (!seen_error ())
    {
      /* Give the frontends the chance to emit early debug based on
	 what is still reachable in the TU.  */
      (*lang_hooks.finalize_early_debug) ();

      /* Clean up anything that needs cleaning up after initial debug
	 generation.  */
      debuginfo_early_start ();
      (*debug_hooks->early_finish) (main_input_filename);
      debuginfo_early_stop ();
    }

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
3018
3019 /* Reset all state within cgraphunit.c so that we can rerun the compiler
3020 within the same process. For use by toplev::finalize. */
3021
3022 void
3023 cgraphunit_c_finalize (void)
3024 {
3025 gcc_assert (cgraph_new_nodes.length () == 0);
3026 cgraph_new_nodes.truncate (0);
3027
3028 vtable_entry_type = NULL;
3029 queued_nodes = &symtab_terminator;
3030
3031 first_analyzed = NULL;
3032 first_analyzed_var = NULL;
3033 }
3034
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  /* Zeroed cgraph_thunk_info: no this-adjustment, no virtual offset.  */
  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  /* The wrapper's single callee is TARGET.  */
  create_edge (target, NULL, count);
  callees->can_throw_external = !TREE_NOTHROW (target->decl);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Arguments are forwarded unchanged; clear addressability on each so
     they can be passed through directly.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  /* Build the GIMPLE body (output_asm_thunks=false, force_gimple=true).  */
  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
3077
3078 #include "gt-cgraphunit.h"