1 /* Driver of optimization process
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once the front end has parsed the whole body of
31 a function and it is certain that neither the body nor the declaration will change.
32
33 (There is one exception, needed for implementing the GCC extern inline
34 extension.)
35
36 - varpool_finalize_decl
37
38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once the (source level) compilation unit is finalized
48 and will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and/or linktime analysis stage (WPA):
102
103 At linktime the units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out, and thus the remaining variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into the callgraph by cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of a function as a new one (a version)
157 and applies simple transformations to it.
158 */
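
/* Illustrative sketch (not part of this file): a frontend typically drives
   the interface described above roughly as follows, where FNDECL is a fully
   parsed FUNCTION_DECL and VARDECL a finished static variable:

     cgraph_node::finalize_function (fndecl, false);   // no_collect == false
     varpool_node::finalize_decl (vardecl);

     // Once the whole translation unit has been parsed:
     symtab->finalize_compilation_unit ();

   finalize_compilation_unit constructs the symbol table as described above
   and then hands control to the backend via compile ().  */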
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208 #include "ipa-inline.h"
209 #include "omp-offload.h"
210
211 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
212 secondary queue used during optimization to accommodate passes that
213 may generate new functions that need to be optimized and expanded. */
214 vec<cgraph_node *> cgraph_new_nodes;
215
216 static void expand_all_functions (void);
217 static void mark_functions_to_output (void);
218 static void handle_alias_pairs (void);
219
220 /* Used for vtable lookup in thunk adjusting. */
221 static GTY (()) tree vtable_entry_type;
222
223 /* Return true if this symbol is a function from the C frontend specified
224 directly in RTL form (with "__RTL"). */
225
226 bool
227 symtab_node::native_rtl_p () const
228 {
229 if (TREE_CODE (decl) != FUNCTION_DECL)
230 return false;
231 if (!DECL_STRUCT_FUNCTION (decl))
232 return false;
233 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
234 }
235
236 /* Determine if the symbol declaration is needed. That is, it is visible to
237 something outside this translation unit or to something magic in the system
238 configury. */
239 bool
240 symtab_node::needed_p (void)
241 {
242 /* Double check that no one output the function into assembly file
243 early. */
244 if (!native_rtl_p ())
245 gcc_checking_assert
246 (!DECL_ASSEMBLER_NAME_SET_P (decl)
247 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
248
249 if (!definition)
250 return false;
251
252 if (DECL_EXTERNAL (decl))
253 return false;
254
255 /* If the user told us it is used, then it must be so. */
256 if (force_output)
257 return true;
258
259 /* ABI forced symbols are needed when they are external. */
260 if (forced_by_abi && TREE_PUBLIC (decl))
261 return true;
262
263 /* Keep constructors, destructors and virtual functions. */
264 if (TREE_CODE (decl) == FUNCTION_DECL
265 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
266 return true;
267
268 /* Externally visible variables must be output. The exception is
269 COMDAT variables that must be output only when they are needed. */
270 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
271 return true;
272
273 return false;
274 }
275
276 /* Head and terminator of the queue of nodes to be processed while building
277 callgraph. */
278
279 static symtab_node symtab_terminator (SYMTAB_SYMBOL);
280 static symtab_node *queued_nodes = &symtab_terminator;
281
282 /* Add NODE to the queue starting at QUEUED_NODES.
283 The queue is linked via AUX pointers and terminated by the symtab_terminator sentinel. */
284
285 static void
286 enqueue_node (symtab_node *node)
287 {
288 if (node->aux)
289 return;
290 gcc_checking_assert (queued_nodes);
291 node->aux = queued_nodes;
292 queued_nodes = node;
293 }
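
/* Sketch of how the queue above is drained (it mirrors the loop in
   analyze_functions below); nodes are popped until the sentinel is reached:

     while (queued_nodes != &symtab_terminator)
       {
         symtab_node *node = queued_nodes;
         queued_nodes = (symtab_node *) node->aux;
         // ... analyze NODE and enqueue everything it references ...
       }
 */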
294
295 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
296 functions into the callgraph in such a way that they look like ordinary reachable
297 functions inserted into the callgraph already at construction time. */
298
299 void
300 symbol_table::process_new_functions (void)
301 {
302 tree fndecl;
303
304 if (!cgraph_new_nodes.exists ())
305 return;
306
307 handle_alias_pairs ();
308 /* Note that this queue may grow as it is being processed, as the new
309 functions may generate new ones. */
310 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
311 {
312 cgraph_node *node = cgraph_new_nodes[i];
313 fndecl = node->decl;
314 switch (state)
315 {
316 case CONSTRUCTION:
317 /* At construction time we just need to finalize function and move
318 it into reachable functions list. */
319
320 cgraph_node::finalize_function (fndecl, false);
321 call_cgraph_insertion_hooks (node);
322 enqueue_node (node);
323 break;
324
325 case IPA:
326 case IPA_SSA:
327 case IPA_SSA_AFTER_INLINING:
328 /* When IPA optimization has already started, do all essential
329 transformations that have already been performed on the whole
330 cgraph but not on this function. */
331
332 gimple_register_cfg_hooks ();
333 if (!node->analyzed)
334 node->analyze ();
335 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
336 if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
337 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
338 {
339 bool summaries_computed = ipa_fn_summaries != NULL;
340 g->get_passes ()->execute_early_local_passes ();
341 /* Early passes compute inline parameters to do inlining
342 and splitting. This is redundant for functions added late.
343 Just throw away whatever it did. */
344 if (!summaries_computed)
345 {
346 ipa_free_fn_summary ();
347 ipa_free_size_summary ();
348 }
349 }
350 else if (ipa_fn_summaries != NULL)
351 compute_fn_summary (node, true);
352 free_dominance_info (CDI_POST_DOMINATORS);
353 free_dominance_info (CDI_DOMINATORS);
354 pop_cfun ();
355 call_cgraph_insertion_hooks (node);
356 break;
357
358 case EXPANSION:
359 /* Functions created during expansion shall be compiled
360 directly. */
361 node->process = 0;
362 call_cgraph_insertion_hooks (node);
363 node->expand ();
364 break;
365
366 default:
367 gcc_unreachable ();
368 break;
369 }
370 }
371
372 cgraph_new_nodes.release ();
373 }
374
375 /* As a GCC extension we allow redefinition of the function. The
376 semantics when the two bodies differ are not well defined.
377 We replace the old body with the new one, so in unit-at-a-time mode
378 we always use the new body, while in normal mode we may end up with
379 the old body inlined into some functions and the new body expanded and
380 inlined in others.
381
382 ??? It may make more sense to use one body for inlining and the other
383 for expanding the function, but this is difficult to do. */
384
385 void
386 cgraph_node::reset (void)
387 {
388 /* If process is set, then we have already begun whole-unit analysis.
389 This is *not* testing for whether we've already emitted the function.
390 That case can be sort-of legitimately seen with real function redefinition
391 errors. I would argue that the front end should never present us with
392 such a case, but don't enforce that for now. */
393 gcc_assert (!process);
394
395 /* Reset our data structures so we can analyze the function again. */
396 inlined_to = NULL;
397 memset (&rtl, 0, sizeof (rtl));
398 analyzed = false;
399 definition = false;
400 alias = false;
401 transparent_alias = false;
402 weakref = false;
403 cpp_implicit_alias = false;
404
405 remove_callees ();
406 remove_all_references ();
407 }
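
/* Illustrative source-level example of the extension described above
   (gnu89-style extern inline; the names are hypothetical):

     extern inline int twice (int x) { return x + x; }  // inline-only body
     int twice (int x) { return 2 * x; }                // real definition

   When the second definition is finalized, the node holding the first body
   is reset and marked redefined_extern_inline.  */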
408
409 /* Return true when there are references to the node. INCLUDE_SELF is
410 true if a self reference counts as a reference. */
411
412 bool
413 symtab_node::referred_to_p (bool include_self)
414 {
415 ipa_ref *ref = NULL;
416
417 /* See if there are any references at all. */
418 if (iterate_referring (0, ref))
419 return true;
420 /* For functions check also calls. */
421 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
422 if (cn && cn->callers)
423 {
424 if (include_self)
425 return true;
426 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
427 if (e->caller != this)
428 return true;
429 }
430 return false;
431 }
432
433 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
434 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
435 the garbage collector run at the moment. We would need to either create
436 a new GC context, or just not compile right now. */
437
438 void
439 cgraph_node::finalize_function (tree decl, bool no_collect)
440 {
441 cgraph_node *node = cgraph_node::get_create (decl);
442
443 if (node->definition)
444 {
445 /* Nested functions should only be defined once. */
446 gcc_assert (!DECL_CONTEXT (decl)
447 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
448 node->reset ();
449 node->redefined_extern_inline = true;
450 }
451
452 /* Set definition before calling notice_global_symbol so that
453 it is available to that function. */
454 node->definition = true;
455 notice_global_symbol (decl);
456 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
457 if (!flag_toplevel_reorder)
458 node->no_reorder = true;
459
460 /* With -fkeep-inline-functions we are keeping all inline functions except
461 for extern inline ones. */
462 if (flag_keep_inline_functions
463 && DECL_DECLARED_INLINE_P (decl)
464 && !DECL_EXTERNAL (decl)
465 && !DECL_DISREGARD_INLINE_LIMITS (decl))
466 node->force_output = 1;
467
468 /* __RTL functions were already output as soon as they were parsed (due
469 to the large amount of global state in the backend).
470 Mark such functions as "force_output" to reflect the fact that they
471 will be in the asm file when considering the symbols they reference.
472 The attempt to output them later on will bail out immediately. */
473 if (node->native_rtl_p ())
474 node->force_output = 1;
475
476 /* When not optimizing, also output static functions (see
477 PR24561), but do not do so for always_inline functions, functions
478 declared inline, and nested functions. These were optimized out
479 in the original implementation and it is unclear whether we want
480 to change the behavior here. */
481 if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
482 || node->no_reorder)
483 && !node->cpp_implicit_alias
484 && !DECL_DISREGARD_INLINE_LIMITS (decl)
485 && !DECL_DECLARED_INLINE_P (decl)
486 && !(DECL_CONTEXT (decl)
487 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
488 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
489 node->force_output = 1;
490
491 /* If we've not yet emitted decl, tell the debug info about it. */
492 if (!TREE_ASM_WRITTEN (decl))
493 (*debug_hooks->deferred_inline_function) (decl);
494
495 if (!no_collect)
496 ggc_collect ();
497
498 if (symtab->state == CONSTRUCTION
499 && (node->needed_p () || node->referred_to_p ()))
500 enqueue_node (node);
501 }
502
503 /* Add the function FNDECL to the call graph.
504 Unlike finalize_function, this function is intended to be used
505 by the middle end and allows insertion of a new function at an arbitrary
506 point of compilation. The function can be in high, low or SSA form
507 GIMPLE.
508
509 The function is assumed to be reachable and to have its address taken (so no
510 API-breaking optimizations are performed on it).
511
512 The main work done by this function is to enqueue the function for later
513 processing, avoiding the need for passes to be re-entrant. */
514
515 void
516 cgraph_node::add_new_function (tree fndecl, bool lowered)
517 {
518 gcc::pass_manager *passes = g->get_passes ();
519 cgraph_node *node;
520
521 if (dump_file)
522 {
523 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
524 const char *function_type = ((gimple_has_body_p (fndecl))
525 ? (lowered
526 ? (gimple_in_ssa_p (fn)
527 ? "ssa gimple"
528 : "low gimple")
529 : "high gimple")
530 : "to-be-gimplified");
531 fprintf (dump_file,
532 "Added new %s function %s to callgraph\n",
533 function_type,
534 fndecl_name (fndecl));
535 }
536
537 switch (symtab->state)
538 {
539 case PARSING:
540 cgraph_node::finalize_function (fndecl, false);
541 break;
542 case CONSTRUCTION:
543 /* Just enqueue the function to be processed at the next opportunity. */
544 node = cgraph_node::get_create (fndecl);
545 if (lowered)
546 node->lowered = true;
547 cgraph_new_nodes.safe_push (node);
548 break;
549
550 case IPA:
551 case IPA_SSA:
552 case IPA_SSA_AFTER_INLINING:
553 case EXPANSION:
554 /* Bring the function into finalized state and enqueue it for later
555 analysis and compilation. */
556 node = cgraph_node::get_create (fndecl);
557 node->local = false;
558 node->definition = true;
559 node->force_output = true;
560 if (TREE_PUBLIC (fndecl))
561 node->externally_visible = true;
562 if (!lowered && symtab->state == EXPANSION)
563 {
564 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
565 gimple_register_cfg_hooks ();
566 bitmap_obstack_initialize (NULL);
567 execute_pass_list (cfun, passes->all_lowering_passes);
568 passes->execute_early_local_passes ();
569 bitmap_obstack_release (NULL);
570 pop_cfun ();
571
572 lowered = true;
573 }
574 if (lowered)
575 node->lowered = true;
576 cgraph_new_nodes.safe_push (node);
577 break;
578
579 case FINISHED:
580 /* At the very end of compilation we have to do all the work up
581 to expansion. */
582 node = cgraph_node::create (fndecl);
583 if (lowered)
584 node->lowered = true;
585 node->definition = true;
586 node->analyze ();
587 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
588 gimple_register_cfg_hooks ();
589 bitmap_obstack_initialize (NULL);
590 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
591 g->get_passes ()->execute_early_local_passes ();
592 bitmap_obstack_release (NULL);
593 pop_cfun ();
594 node->expand ();
595 break;
596
597 default:
598 gcc_unreachable ();
599 }
600
601 /* Set a personality if required and we already passed EH lowering. */
602 if (lowered
603 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
604 == eh_personality_lang))
605 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
606 }
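
/* Minimal usage sketch (assumed, not from this file) of how backend or plugin
   code can register a freshly built function; "__helper" is a hypothetical
   name and the construction of the GIMPLE body is elided:

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("__helper", fntype);
     // ... build DECL_RESULT, DECL_STRUCT_FUNCTION and a GIMPLE body ...
     cgraph_node::add_new_function (fndecl, false);    // lowered == false

   The node is queued in cgraph_new_nodes and later picked up by
   process_new_functions above.  */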
607
608 /* Analyze the function scheduled to be output. */
609 void
610 cgraph_node::analyze (void)
611 {
612 if (native_rtl_p ())
613 {
614 analyzed = true;
615 return;
616 }
617
618 tree decl = this->decl;
619 location_t saved_loc = input_location;
620 input_location = DECL_SOURCE_LOCATION (decl);
621
622 if (thunk.thunk_p)
623 {
624 cgraph_node *t = cgraph_node::get (thunk.alias);
625
626 create_edge (t, NULL, t->count);
627 callees->can_throw_external = !TREE_NOTHROW (t->decl);
628 /* Target code in expand_thunk may need the thunk's target
629 to be analyzed, so recurse here. */
630 if (!t->analyzed && t->definition)
631 t->analyze ();
632 if (t->alias)
633 {
634 t = t->get_alias_target ();
635 if (!t->analyzed && t->definition)
636 t->analyze ();
637 }
638 bool ret = expand_thunk (false, false);
639 thunk.alias = NULL;
640 if (!ret)
641 return;
642 }
643 if (alias)
644 resolve_alias (cgraph_node::get (alias_target), transparent_alias);
645 else if (dispatcher_function)
646 {
647 /* Generate the dispatcher body of multi-versioned functions. */
648 cgraph_function_version_info *dispatcher_version_info
649 = function_version ();
650 if (dispatcher_version_info != NULL
651 && (dispatcher_version_info->dispatcher_resolver
652 == NULL_TREE))
653 {
654 tree resolver = NULL_TREE;
655 gcc_assert (targetm.generate_version_dispatcher_body);
656 resolver = targetm.generate_version_dispatcher_body (this);
657 gcc_assert (resolver != NULL_TREE);
658 }
659 }
660 else
661 {
662 push_cfun (DECL_STRUCT_FUNCTION (decl));
663
664 assign_assembler_name_if_needed (decl);
665
666 /* Make sure to gimplify bodies only once. While analyzing a
667 function we lower it, which requires gimplified nested
668 functions, so we can end up here with an already gimplified
669 body. */
670 if (!gimple_has_body_p (decl))
671 gimplify_function_tree (decl);
672
673 /* Lower the function. */
674 if (!lowered)
675 {
676 if (first_nested_function (this))
677 lower_nested_functions (decl);
678
679 gimple_register_cfg_hooks ();
680 bitmap_obstack_initialize (NULL);
681 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
682 free_dominance_info (CDI_POST_DOMINATORS);
683 free_dominance_info (CDI_DOMINATORS);
684 compact_blocks ();
685 bitmap_obstack_release (NULL);
686 lowered = true;
687 }
688
689 pop_cfun ();
690 }
691 analyzed = true;
692
693 input_location = saved_loc;
694 }
695
696 /* The C++ frontend produces same-body aliases all over the place, even before
697 PCH gets streamed out. It relies on us linking the aliases with their functions
698 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
699 first produce aliases without links, and once the C++ FE is sure it will not
700 stream PCH we build the links via this function. */
701
702 void
703 symbol_table::process_same_body_aliases (void)
704 {
705 symtab_node *node;
706 FOR_EACH_SYMBOL (node)
707 if (node->cpp_implicit_alias && !node->analyzed)
708 node->resolve_alias
709 (VAR_P (node->alias_target)
710 ? (symtab_node *)varpool_node::get_create (node->alias_target)
711 : (symtab_node *)cgraph_node::get_create (node->alias_target));
712 cpp_implicit_aliases_done = true;
713 }
714
715 /* Process a symver attribute. */
716
717 static void
718 process_symver_attribute (symtab_node *n)
719 {
720 tree value = lookup_attribute ("symver", DECL_ATTRIBUTES (n->decl));
721
722 for (; value != NULL; value = TREE_CHAIN (value))
723 {
724 /* Starting from binutils 2.35 gas supports:
725 # Assign foo to bar@V1 and baz@V2.
726 .symver foo, bar@V1
727 .symver foo, baz@V2
728 */
729 const char *purpose = IDENTIFIER_POINTER (TREE_PURPOSE (value));
730 if (strcmp (purpose, "symver") != 0)
731 continue;
732
733 tree symver = get_identifier_with_length
734 (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (value))),
735 TREE_STRING_LENGTH (TREE_VALUE (TREE_VALUE (value))));
736 symtab_node *def = symtab_node::get_for_asmname (symver);
737
738 if (def)
739 {
740 error_at (DECL_SOURCE_LOCATION (n->decl),
741 "duplicate definition of a symbol version");
742 inform (DECL_SOURCE_LOCATION (def->decl),
743 "same version was previously defined here");
744 return;
745 }
746 if (!n->definition)
747 {
748 error_at (DECL_SOURCE_LOCATION (n->decl),
749 "symbol needs to be defined to have a version");
750 return;
751 }
752 if (DECL_COMMON (n->decl))
753 {
754 error_at (DECL_SOURCE_LOCATION (n->decl),
755 "common symbol cannot be versioned");
756 return;
757 }
758 if (DECL_COMDAT (n->decl))
759 {
760 error_at (DECL_SOURCE_LOCATION (n->decl),
761 "comdat symbol cannot be versioned");
762 return;
763 }
764 if (n->weakref)
765 {
766 error_at (DECL_SOURCE_LOCATION (n->decl),
767 "%<weakref%> cannot be versioned");
768 return;
769 }
770 if (!TREE_PUBLIC (n->decl))
771 {
772 error_at (DECL_SOURCE_LOCATION (n->decl),
773 "versioned symbol must be public");
774 return;
775 }
776 if (DECL_VISIBILITY (n->decl) != VISIBILITY_DEFAULT)
777 {
778 error_at (DECL_SOURCE_LOCATION (n->decl),
779 "versioned symbol must have default visibility");
780 return;
781 }
782
783 /* Create new symbol table entry representing the version. */
784 tree new_decl = copy_node (n->decl);
785
786 DECL_INITIAL (new_decl) = NULL_TREE;
787 if (TREE_CODE (new_decl) == FUNCTION_DECL)
788 DECL_STRUCT_FUNCTION (new_decl) = NULL;
789 SET_DECL_ASSEMBLER_NAME (new_decl, symver);
790 TREE_PUBLIC (new_decl) = 1;
791 DECL_ATTRIBUTES (new_decl) = NULL;
792
793 symtab_node *symver_node = symtab_node::get_create (new_decl);
794 symver_node->alias = true;
795 symver_node->definition = true;
796 symver_node->symver = true;
797 symver_node->create_reference (n, IPA_REF_ALIAS, NULL);
798 symver_node->analyzed = true;
799 }
800 }
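
/* Source-level example of the attribute handled above (illustrative only):

     __attribute__ ((symver ("foo@VERS_1")))
     int foo_v1 (void) { return 1; }

   which corresponds to the gas directive ".symver foo_v1, foo@VERS_1".  */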
801
802 /* Process attributes common to variables and functions. */
803
804 static void
805 process_common_attributes (symtab_node *node, tree decl)
806 {
807 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
808
809 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
810 {
811 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
812 "%<weakref%> attribute should be accompanied with"
813 " an %<alias%> attribute");
814 DECL_WEAK (decl) = 0;
815 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
816 DECL_ATTRIBUTES (decl));
817 }
818
819 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
820 node->no_reorder = 1;
821 process_symver_attribute (node);
822 }
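
/* Source-level example (illustrative only): a well-formed weakref names its
   target, either directly or via a separate alias attribute:

     static void local_f (void) __attribute__ ((weakref ("real_f")));

   A bare weakref with no alias target triggers the warning above and the
   attribute is dropped.  */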
823
824 /* Look for externally_visible and used attributes and mark cgraph nodes
825 accordingly.
826
827 We cannot mark the nodes at the point the attributes are processed (in
828 handle_*_attribute) because the copy of the declarations available at that
829 point may not be canonical. For example, in:
830
831 void f();
832 void f() __attribute__((used));
833
834 the declaration we see in handle_used_attribute will be the second
835 declaration -- but the front end will subsequently merge that declaration
836 with the original declaration and discard the second declaration.
837
838 Furthermore, we can't mark these nodes in finalize_function because:
839
840 void f() {}
841 void f() __attribute__((externally_visible));
842
843 is valid.
844
845 So, we walk the nodes at the end of the translation unit, applying the
846 attributes at that point. */
847
848 static void
849 process_function_and_variable_attributes (cgraph_node *first,
850 varpool_node *first_var)
851 {
852 cgraph_node *node;
853 varpool_node *vnode;
854
855 for (node = symtab->first_function (); node != first;
856 node = symtab->next_function (node))
857 {
858 tree decl = node->decl;
859
860 if (node->alias
861 && lookup_attribute ("flatten", DECL_ATTRIBUTES (decl)))
862 {
863 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
864 "%<flatten%> attribute is ignored on aliases");
865 }
866 if (DECL_PRESERVE_P (decl))
867 node->mark_force_output ();
868 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
869 {
870 if (! TREE_PUBLIC (node->decl))
871 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
872 "%<externally_visible%>"
873 " attribute have effect only on public objects");
874 }
875 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
876 && node->definition
877 && (!node->alias || DECL_INITIAL (decl) != error_mark_node))
878 {
879 /* NODE->DEFINITION && NODE->ALIAS is nonzero for valid weakref
880 function declarations; DECL_INITIAL is non-null for invalid
881 weakref functions that are also defined. */
882 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
883 "%<weakref%> attribute ignored"
884 " because function is defined");
885 DECL_WEAK (decl) = 0;
886 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
887 DECL_ATTRIBUTES (decl));
888 DECL_ATTRIBUTES (decl) = remove_attribute ("alias",
889 DECL_ATTRIBUTES (decl));
890 node->alias = false;
891 node->weakref = false;
892 node->transparent_alias = false;
893 }
894 else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
895 && node->definition
896 && !node->alias)
897 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
898 "%<alias%> attribute ignored"
899 " because function is defined");
900
901 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
902 && !DECL_DECLARED_INLINE_P (decl)
903 /* redefining extern inline function makes it DECL_UNINLINABLE. */
904 && !DECL_UNINLINABLE (decl))
905 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
906 "%<always_inline%> function might not be inlinable");
907
908 process_common_attributes (node, decl);
909 }
910 for (vnode = symtab->first_variable (); vnode != first_var;
911 vnode = symtab->next_variable (vnode))
912 {
913 tree decl = vnode->decl;
914 if (DECL_EXTERNAL (decl)
915 && DECL_INITIAL (decl))
916 varpool_node::finalize_decl (decl);
917 if (DECL_PRESERVE_P (decl))
918 vnode->force_output = true;
919 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
920 {
921 if (! TREE_PUBLIC (vnode->decl))
922 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
923 "%<externally_visible%>"
924 " attribute have effect only on public objects");
925 }
926 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
927 && vnode->definition
928 && DECL_INITIAL (decl))
929 {
930 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
931 "%<weakref%> attribute ignored"
932 " because variable is initialized");
933 DECL_WEAK (decl) = 0;
934 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
935 DECL_ATTRIBUTES (decl));
936 }
937 process_common_attributes (vnode, decl);
938 }
939 }
940
941 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
942 the middle end to output the variable to the asm file if it is needed or
943 externally visible. */
944
945 void
946 varpool_node::finalize_decl (tree decl)
947 {
948 varpool_node *node = varpool_node::get_create (decl);
949
950 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
951
952 if (node->definition)
953 return;
954 /* Set definition before calling notice_global_symbol so that
955 it is available to that function. */
956 node->definition = true;
957 notice_global_symbol (decl);
958 if (!flag_toplevel_reorder)
959 node->no_reorder = true;
960 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
961 /* Traditionally we do not eliminate static variables when not
962 optimizing and when not doing toplevel reorder. */
963 || (node->no_reorder && !DECL_COMDAT (node->decl)
964 && !DECL_ARTIFICIAL (node->decl)))
965 node->force_output = true;
966
967 if (symtab->state == CONSTRUCTION
968 && (node->needed_p () || node->referred_to_p ()))
969 enqueue_node (node);
970 if (symtab->state >= IPA_SSA)
971 node->analyze ();
972 /* Some frontends produce various interface variables after compilation
973 has finished. */
974 if (symtab->state == FINISHED
975 || (node->no_reorder
976 && symtab->state == EXPANSION))
977 node->assemble_decl ();
978 }
979
980 /* EDGE is a polymorphic call. Mark all possible targets as reachable
981 and if there is only one target, perform trivial devirtualization.
982 REACHABLE_CALL_TARGETS collects target lists we already walked to
983 avoid duplicate work. */
984
985 static void
986 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
987 cgraph_edge *edge)
988 {
989 unsigned int i;
990 void *cache_token;
991 bool final;
992 vec <cgraph_node *>targets
993 = possible_polymorphic_call_targets
994 (edge, &final, &cache_token);
995
996 if (!reachable_call_targets->add (cache_token))
997 {
998 if (symtab->dump_file)
999 dump_possible_polymorphic_call_targets
1000 (symtab->dump_file, edge);
1001
1002 for (i = 0; i < targets.length (); i++)
1003 {
1004 /* Do not bother to mark virtual methods in an anonymous namespace;
1005 either we will find a use of the virtual table defining it, or it is
1006 unused. */
1007 if (targets[i]->definition
1008 && TREE_CODE
1009 (TREE_TYPE (targets[i]->decl))
1010 == METHOD_TYPE
1011 && !type_in_anonymous_namespace_p
1012 (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
1013 enqueue_node (targets[i]);
1014 }
1015 }
1016
1017 /* Very trivial devirtualization; when the type is
1018 final or anonymous (so we know all of its derived types)
1019 and there is only one possible virtual call target,
1020 make the edge direct. */
1021 if (final)
1022 {
1023 if (targets.length () <= 1 && dbg_cnt (devirt))
1024 {
1025 cgraph_node *target;
1026 if (targets.length () == 1)
1027 target = targets[0];
1028 else
1029 target = cgraph_node::create
1030 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
1031
1032 if (symtab->dump_file)
1033 {
1034 fprintf (symtab->dump_file,
1035 "Devirtualizing call: ");
1036 print_gimple_stmt (symtab->dump_file,
1037 edge->call_stmt, 0,
1038 TDF_SLIM);
1039 }
1040 if (dump_enabled_p ())
1041 {
1042 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
1043 "devirtualizing call in %s to %s\n",
1044 edge->caller->dump_name (),
1045 target->dump_name ());
1046 }
1047
1048 edge = cgraph_edge::make_direct (edge, target);
1049 gimple *new_call = cgraph_edge::redirect_call_stmt_to_callee (edge);
1050
1051 if (symtab->dump_file)
1052 {
1053 fprintf (symtab->dump_file, "Devirtualized as: ");
1054 print_gimple_stmt (symtab->dump_file, new_call, 0, TDF_SLIM);
1055 }
1056 }
1057 }
1058 }
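
/* Illustrative C++ example of the trivial devirtualization above: because S
   is final, the only possible target of the indirect call is S::f, so the
   edge can be made direct:

     struct S final { virtual int f () { return 1; } };
     int g (S *p) { return p->f (); }
 */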
1059
1060 /* Issue appropriate warnings for the global declaration represented by SNODE. */
1061
1062 static void
1063 check_global_declaration (symtab_node *snode)
1064 {
1065 const char *decl_file;
1066 tree decl = snode->decl;
1067
1068 /* Warn about any function declared static but not defined. We don't
1069 warn about variables, because many programs have static variables
1070 that exist only to get some text into the object file. */
1071 if (TREE_CODE (decl) == FUNCTION_DECL
1072 && DECL_INITIAL (decl) == 0
1073 && DECL_EXTERNAL (decl)
1074 && ! DECL_ARTIFICIAL (decl)
1075 && ! TREE_PUBLIC (decl))
1076 {
1077 if (TREE_NO_WARNING (decl))
1078 ;
1079 else if (snode->referred_to_p (/*include_self=*/false))
1080 pedwarn (input_location, 0, "%q+F used but never defined", decl);
1081 else
1082 warning (OPT_Wunused_function, "%q+F declared %<static%> but never "
1083 "defined", decl);
1084 /* This symbol is effectively an "extern" declaration now. */
1085 TREE_PUBLIC (decl) = 1;
1086 }
1087
1088 /* Warn about static fns or vars defined but not used. */
1089 if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
1090 || (((warn_unused_variable && ! TREE_READONLY (decl))
1091 || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
1092 && (warn_unused_const_variable == 2
1093 || (main_input_filename != NULL
1094 && (decl_file = DECL_SOURCE_FILE (decl)) != NULL
1095 && filename_cmp (main_input_filename,
1096 decl_file) == 0))))
1097 && VAR_P (decl)))
1098 && ! DECL_IN_SYSTEM_HEADER (decl)
1099 && ! snode->referred_to_p (/*include_self=*/false)
1100 /* This TREE_USED check is needed in addition to referred_to_p
1101 above, because the `__unused__' attribute is not being
1102 considered for referred_to_p. */
1103 && ! TREE_USED (decl)
1104 /* The TREE_USED bit for file-scope decls is kept in the identifier,
1105 to handle multiple external decls in different scopes. */
1106 && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
1107 && ! DECL_EXTERNAL (decl)
1108 && ! DECL_ARTIFICIAL (decl)
1109 && ! DECL_ABSTRACT_ORIGIN (decl)
1110 && ! TREE_PUBLIC (decl)
1111 /* A volatile variable might be used in some non-obvious way. */
1112 && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1113 /* Global register variables must be declared to reserve them. */
1114 && ! (VAR_P (decl) && DECL_REGISTER (decl))
1115 /* Global ctors and dtors are called by the runtime. */
1116 && (TREE_CODE (decl) != FUNCTION_DECL
1117 || (!DECL_STATIC_CONSTRUCTOR (decl)
1118 && !DECL_STATIC_DESTRUCTOR (decl)))
1119 /* Otherwise, ask the language. */
1120 && lang_hooks.decls.warn_unused_global (decl))
1121 warning_at (DECL_SOURCE_LOCATION (decl),
1122 (TREE_CODE (decl) == FUNCTION_DECL)
1123 ? OPT_Wunused_function
1124 : (TREE_READONLY (decl)
1125 ? OPT_Wunused_const_variable_
1126 : OPT_Wunused_variable),
1127 "%qD defined but not used", decl);
1128 }
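
/* Examples of declarations diagnosed above (illustrative only, assuming
   neither function is referenced):

     static void f (void);        // declared 'static' but never defined
     static void g (void) { }     // defined but not used (-Wunused-function)
 */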
1129
1130 /* Discover all functions and variables that are trivially needed, then analyze
1131 them as well as all functions and variables referred to by them. */
1132 static cgraph_node *first_analyzed;
1133 static varpool_node *first_analyzed_var;
1134
1135 /* FIRST_TIME is set to TRUE for the first time we are called for a
1136 translation unit from finalize_compilation_unit() or false
1137 otherwise. */
1138
1139 static void
1140 analyze_functions (bool first_time)
1141 {
1142 /* Keep track of already processed nodes when called multiple times for
1143 intermodule optimization. */
1144 cgraph_node *first_handled = first_analyzed;
1145 varpool_node *first_handled_var = first_analyzed_var;
1146 hash_set<void *> reachable_call_targets;
1147
1148 symtab_node *node;
1149 symtab_node *next;
1150 int i;
1151 ipa_ref *ref;
1152 bool changed = true;
1153 location_t saved_loc = input_location;
1154
1155 bitmap_obstack_initialize (NULL);
1156 symtab->state = CONSTRUCTION;
1157 input_location = UNKNOWN_LOCATION;
1158
1159 /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
1160 the C++ FE is confused about the COMDAT groups being right. */
1161 if (symtab->cpp_implicit_aliases_done)
1162 FOR_EACH_SYMBOL (node)
1163 if (node->cpp_implicit_alias)
1164 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1165 build_type_inheritance_graph ();
1166
1167 if (flag_openmp && first_time)
1168 omp_discover_implicit_declare_target ();
1169
1170 /* Analysis adds static variables that in turn add references to new functions,
1171 so we need to iterate the process until it stabilizes. */
1172 while (changed)
1173 {
1174 changed = false;
1175 process_function_and_variable_attributes (first_analyzed,
1176 first_analyzed_var);
1177
1178 /* First identify the trivially needed symbols. */
1179 for (node = symtab->first_symbol ();
1180 node != first_analyzed
1181 && node != first_analyzed_var; node = node->next)
1182 {
1183 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1184 node->get_comdat_group_id ();
1185 if (node->needed_p ())
1186 {
1187 enqueue_node (node);
1188 if (!changed && symtab->dump_file)
1189 fprintf (symtab->dump_file, "Trivially needed symbols:");
1190 changed = true;
1191 if (symtab->dump_file)
1192 fprintf (symtab->dump_file, " %s", node->dump_asm_name ());
1193 if (!changed && symtab->dump_file)
1194 fprintf (symtab->dump_file, "\n");
1195 }
1196 if (node == first_analyzed
1197 || node == first_analyzed_var)
1198 break;
1199 }
1200 symtab->process_new_functions ();
1201 first_analyzed_var = symtab->first_variable ();
1202 first_analyzed = symtab->first_function ();
1203
1204 if (changed && symtab->dump_file)
1205 fprintf (symtab->dump_file, "\n");
1206
1207 /* Lower the representation, build callgraph edges and references for all trivially
1208 needed symbols and all symbols referred to by them. */
1209 while (queued_nodes != &symtab_terminator)
1210 {
1211 changed = true;
1212 node = queued_nodes;
1213 queued_nodes = (symtab_node *)queued_nodes->aux;
1214 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1215 if (cnode && cnode->definition)
1216 {
1217 cgraph_edge *edge;
1218 tree decl = cnode->decl;
1219
1220 /* ??? It is possible to create an extern inline function
1221 and later use the weak alias attribute to kill its body.
1222 See gcc.c-torture/compile/20011119-1.c */
1223 if (!DECL_STRUCT_FUNCTION (decl)
1224 && !cnode->alias
1225 && !cnode->thunk.thunk_p
1226 && !cnode->dispatcher_function)
1227 {
1228 cnode->reset ();
1229 cnode->redefined_extern_inline = true;
1230 continue;
1231 }
1232
1233 if (!cnode->analyzed)
1234 cnode->analyze ();
1235
1236 for (edge = cnode->callees; edge; edge = edge->next_callee)
1237 if (edge->callee->definition
1238 && (!DECL_EXTERNAL (edge->callee->decl)
1239 /* When not optimizing, do not try to analyze extern
1240 inline functions. Doing so is pointless. */
1241 || opt_for_fn (edge->callee->decl, optimize)
1242 /* Weakrefs need to be preserved. */
1243 || edge->callee->alias
1244 /* always_inline functions are inlined even at -O0. */
1245 || lookup_attribute
1246 ("always_inline",
1247 DECL_ATTRIBUTES (edge->callee->decl))
1248 /* Multiversioned functions need the dispatcher to
1249 be produced locally even for extern functions. */
1250 || edge->callee->function_version ()))
1251 enqueue_node (edge->callee);
1252 if (opt_for_fn (cnode->decl, optimize)
1253 && opt_for_fn (cnode->decl, flag_devirtualize))
1254 {
1255 cgraph_edge *next;
1256
1257 for (edge = cnode->indirect_calls; edge; edge = next)
1258 {
1259 next = edge->next_callee;
1260 if (edge->indirect_info->polymorphic)
1261 walk_polymorphic_call_targets (&reachable_call_targets,
1262 edge);
1263 }
1264 }
1265
1266 /* If decl is a clone of an abstract function,
1267 mark that abstract function so that we don't release its body.
1268 The DECL_INITIAL() of that abstract function declaration
1269 will later be needed to output debug info. */
1270 if (DECL_ABSTRACT_ORIGIN (decl))
1271 {
1272 cgraph_node *origin_node
1273 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1274 origin_node->used_as_abstract_origin = true;
1275 }
1276 /* Preserve a function's context node. It will
1277 later be needed to output debug info. */
1278 if (tree fn = decl_function_context (decl))
1279 {
1280 cgraph_node *origin_node = cgraph_node::get_create (fn);
1281 enqueue_node (origin_node);
1282 }
1283 }
1284 else
1285 {
1286 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1287 if (vnode && vnode->definition && !vnode->analyzed)
1288 vnode->analyze ();
1289 }
1290
1291 if (node->same_comdat_group)
1292 {
1293 symtab_node *next;
1294 for (next = node->same_comdat_group;
1295 next != node;
1296 next = next->same_comdat_group)
1297 if (!next->comdat_local_p ())
1298 enqueue_node (next);
1299 }
1300 for (i = 0; node->iterate_reference (i, ref); i++)
1301 if (ref->referred->definition
1302 && (!DECL_EXTERNAL (ref->referred->decl)
1303 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1304 && optimize)
1305 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1306 && opt_for_fn (ref->referred->decl, optimize))
1307 || node->alias
1308 || ref->referred->alias)))
1309 enqueue_node (ref->referred);
1310 symtab->process_new_functions ();
1311 }
1312 }
1313 update_type_inheritance_graph ();
1314
1315 /* Collect entry points to the unit. */
1316 if (symtab->dump_file)
1317 {
1318 fprintf (symtab->dump_file, "\n\nInitial ");
1319 symtab->dump (symtab->dump_file);
1320 }
1321
1322 if (first_time)
1323 {
1324 symtab_node *snode;
1325 FOR_EACH_SYMBOL (snode)
1326 check_global_declaration (snode);
1327 }
1328
1329 if (symtab->dump_file)
1330 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1331
1332 for (node = symtab->first_symbol ();
1333 node != first_handled
1334 && node != first_handled_var; node = next)
1335 {
1336 next = node->next;
1337 /* For symbols declared locally we clear TREE_READONLY when emitting
1338 the constructor (if one is needed). For external declarations we
1339 cannot safely assume that the type is readonly because we may be called
1340 during its construction. */
1341 if (TREE_CODE (node->decl) == VAR_DECL
1342 && TYPE_P (TREE_TYPE (node->decl))
1343 && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
1344 && DECL_EXTERNAL (node->decl))
1345 TREE_READONLY (node->decl) = 0;
1346 if (!node->aux && !node->referred_to_p ())
1347 {
1348 if (symtab->dump_file)
1349 fprintf (symtab->dump_file, " %s", node->dump_name ());
1350
1351 /* See if the debugger can use anything before the DECL
1352 passes away. Perhaps it can notice a DECL that is now a
1353 constant and can tag the early DIE with an appropriate
1354 attribute.
1355
1356 Otherwise, this is the last chance the debug_hooks have
1357 at looking at optimized away DECLs, since
1358 late_global_decl will subsequently be called from the
1359 contents of the now pruned symbol table. */
1360 if (VAR_P (node->decl)
1361 && !decl_function_context (node->decl))
1362 {
1363 /* We are reclaiming totally unreachable code and variables
1364 so they effectively appear as readonly. Show that to
1365 the debug machinery. */
1366 TREE_READONLY (node->decl) = 1;
1367 node->definition = false;
1368 (*debug_hooks->late_global_decl) (node->decl);
1369 }
1370
1371 node->remove ();
1372 continue;
1373 }
1374 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1375 {
1376 tree decl = node->decl;
1377
1378 if (cnode->definition && !gimple_has_body_p (decl)
1379 && !cnode->alias
1380 && !cnode->thunk.thunk_p)
1381 cnode->reset ();
1382
1383 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1384 || cnode->alias
1385 || gimple_has_body_p (decl)
1386 || cnode->native_rtl_p ());
1387 gcc_assert (cnode->analyzed == cnode->definition);
1388 }
1389 node->aux = NULL;
1390 }
1391 for (;node; node = node->next)
1392 node->aux = NULL;
1393 first_analyzed = symtab->first_function ();
1394 first_analyzed_var = symtab->first_variable ();
1395 if (symtab->dump_file)
1396 {
1397 fprintf (symtab->dump_file, "\n\nReclaimed ");
1398 symtab->dump (symtab->dump_file);
1399 }
1400 bitmap_obstack_release (NULL);
1401 ggc_collect ();
1402 /* Initialize the assembler name hash; in particular we want to trigger C++
1403 mangling and same-body alias creation before we free the DECL_ARGUMENTS
1404 they use. */
1405 if (!seen_error ())
1406 symtab->symtab_initialize_asm_name_hash ();
1407
1408 input_location = saved_loc;
1409 }
1410
1411 /* Check declaration of the type of ALIAS for compatibility with its TARGET
1412 (which may be an ifunc resolver) and issue a diagnostic when they are
1413 not compatible according to language rules (plus a C++ extension for
1414 non-static member functions). */
1415
1416 static void
1417 maybe_diag_incompatible_alias (tree alias, tree target)
1418 {
1419 tree altype = TREE_TYPE (alias);
1420 tree targtype = TREE_TYPE (target);
1421
1422 bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
1423 tree funcptr = altype;
1424
1425 if (ifunc)
1426 {
1427 /* Handle attribute ifunc first. */
1428 if (TREE_CODE (altype) == METHOD_TYPE)
1429 {
1430 /* Set FUNCPTR to the type of the alias target. If the type
1431 is a non-static member function of class C, construct a type
1432 of an ordinary function taking C* as the first argument,
1433 followed by the member function argument list, and use it
1434 instead to check for incompatibility. This conversion is
1435 not defined by the language but an extension provided by
1436 G++. */
1437
1438 tree rettype = TREE_TYPE (altype);
1439 tree args = TYPE_ARG_TYPES (altype);
1440 altype = build_function_type (rettype, args);
1441 funcptr = altype;
1442 }
1443
1444 targtype = TREE_TYPE (targtype);
1445
1446 if (POINTER_TYPE_P (targtype))
1447 {
1448 targtype = TREE_TYPE (targtype);
1449
1450 /* Only issue Wattribute-alias for conversions to void* with
1451 -Wextra. */
1452 if (VOID_TYPE_P (targtype) && !extra_warnings)
1453 return;
1454
1455 /* Proceed to handle incompatible ifunc resolvers below. */
1456 }
1457 else
1458 {
1459 funcptr = build_pointer_type (funcptr);
1460
1461 error_at (DECL_SOURCE_LOCATION (target),
1462 "%<ifunc%> resolver for %qD must return %qT",
1463 alias, funcptr);
1464 inform (DECL_SOURCE_LOCATION (alias),
1465 "resolver indirect function declared here");
1466 return;
1467 }
1468 }
1469
1470 if ((!FUNC_OR_METHOD_TYPE_P (targtype)
1471 || (prototype_p (altype)
1472 && prototype_p (targtype)
1473 && !types_compatible_p (altype, targtype))))
1474 {
1475 /* Warn for incompatibilities. Avoid warning for functions
1476 without a prototype to make it possible to declare aliases
1477 without knowing the exact type, as libstdc++ does. */
1478 if (ifunc)
1479 {
1480 funcptr = build_pointer_type (funcptr);
1481
1482 auto_diagnostic_group d;
1483 if (warning_at (DECL_SOURCE_LOCATION (target),
1484 OPT_Wattribute_alias_,
1485 "%<ifunc%> resolver for %qD should return %qT",
1486 alias, funcptr))
1487 inform (DECL_SOURCE_LOCATION (alias),
1488 "resolver indirect function declared here");
1489 }
1490 else
1491 {
1492 auto_diagnostic_group d;
1493 if (warning_at (DECL_SOURCE_LOCATION (alias),
1494 OPT_Wattribute_alias_,
1495 "%qD alias between functions of incompatible "
1496 "types %qT and %qT", alias, altype, targtype))
1497 inform (DECL_SOURCE_LOCATION (target),
1498 "aliased declaration here");
1499 }
1500 }
1501 }
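
/* Illustrative example of an ifunc resolver with the expected return type;
   returning an incompatible type triggers the diagnostics above:

     static int foo_impl (void) { return 0; }
     static int (*resolve_foo (void)) (void) { return foo_impl; }
     int foo (void) __attribute__ ((ifunc ("resolve_foo")));
 */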
1502
1503 /* Translate the ugly representation of aliases as alias pairs into the nice
1504 representation in the callgraph. We don't handle all cases yet,
1505 unfortunately. */
1506
1507 static void
1508 handle_alias_pairs (void)
1509 {
1510 alias_pair *p;
1511 unsigned i;
1512
1513 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1514 {
1515 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1516
1517 /* Weakrefs whose target is not defined in the current unit are easy to handle:
1518 they behave just like external variables, except that we need to note the
1519 alias flag so we can later output the weakref pseudo-op into the asm file. */
1520 if (!target_node
1521 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1522 {
1523 symtab_node *node = symtab_node::get (p->decl);
1524 if (node)
1525 {
1526 node->alias_target = p->target;
1527 node->weakref = true;
1528 node->alias = true;
1529 node->transparent_alias = true;
1530 }
1531 alias_pairs->unordered_remove (i);
1532 continue;
1533 }
1534 else if (!target_node)
1535 {
1536 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1537 symtab_node *node = symtab_node::get (p->decl);
1538 if (node)
1539 node->alias = false;
1540 alias_pairs->unordered_remove (i);
1541 continue;
1542 }
1543
1544 if (DECL_EXTERNAL (target_node->decl)
1545 /* We use local aliases for C++ thunks to force the tailcall
1546 to bind locally. This is a hack - to keep it working do
1547 the following (which is not strictly correct). */
1548 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1549 || ! DECL_VIRTUAL_P (target_node->decl))
1550 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1551 {
1552 error ("%q+D aliased to external symbol %qE",
1553 p->decl, p->target);
1554 }
1555
1556 if (TREE_CODE (p->decl) == FUNCTION_DECL
1557 && target_node && is_a <cgraph_node *> (target_node))
1558 {
1559 maybe_diag_incompatible_alias (p->decl, target_node->decl);
1560
1561 maybe_diag_alias_attributes (p->decl, target_node->decl);
1562
1563 cgraph_node *src_node = cgraph_node::get (p->decl);
1564 if (src_node && src_node->definition)
1565 src_node->reset ();
1566 cgraph_node::create_alias (p->decl, target_node->decl);
1567 alias_pairs->unordered_remove (i);
1568 }
1569 else if (VAR_P (p->decl)
1570 && target_node && is_a <varpool_node *> (target_node))
1571 {
1572 varpool_node::create_alias (p->decl, target_node->decl);
1573 alias_pairs->unordered_remove (i);
1574 }
1575 else
1576 {
1577 error ("%q+D alias between function and variable is not supported",
1578 p->decl);
1579 inform (DECL_SOURCE_LOCATION (target_node->decl),
1580 "aliased declaration here");
1581
1582 alias_pairs->unordered_remove (i);
1583 }
1584 }
1585 vec_free (alias_pairs);
1586 }
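
/* Source-level forms that typically end up as alias pairs (illustrative only):

     void real_fn (void) { }
     void alias_fn (void) __attribute__ ((alias ("real_fn")));

     static int local_ref __attribute__ ((weakref ("extern_var")));

   The first pair becomes an ordinary cgraph alias; the second becomes a
   transparent weakref as handled above.  */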
1587
1588
1589 /* Figure out what functions we want to assemble. */
1590
1591 static void
1592 mark_functions_to_output (void)
1593 {
1594 bool check_same_comdat_groups = false;
1595 cgraph_node *node;
1596
1597 if (flag_checking)
1598 FOR_EACH_FUNCTION (node)
1599 gcc_assert (!node->process);
1600
1601 FOR_EACH_FUNCTION (node)
1602 {
1603 tree decl = node->decl;
1604 node->clear_stmts_in_references ();
1605
1606 gcc_assert (!node->process || node->same_comdat_group);
1607 if (node->process)
1608 continue;
1609
1610 /* We need to output all local functions that are used and not
1611 always inlined, as well as those that are reachable from
1612 outside the current compilation unit. */
1613 if (node->analyzed
1614 && !node->thunk.thunk_p
1615 && !node->alias
1616 && !node->inlined_to
1617 && !TREE_ASM_WRITTEN (decl)
1618 && !DECL_EXTERNAL (decl))
1619 {
1620 node->process = 1;
1621 if (node->same_comdat_group)
1622 {
1623 cgraph_node *next;
1624 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1625 next != node;
1626 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1627 if (!next->thunk.thunk_p && !next->alias
1628 && !next->comdat_local_p ())
1629 next->process = 1;
1630 }
1631 }
1632 else if (node->same_comdat_group)
1633 {
1634 if (flag_checking)
1635 check_same_comdat_groups = true;
1636 }
1637 else
1638 {
1639 /* We should've reclaimed all functions that are not needed. */
1640 if (flag_checking
1641 && !node->inlined_to
1642 && gimple_has_body_p (decl)
1643 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline
1644 copies are inside a partition, we can end up not removing the body since we no
1645 longer have an analyzed node pointing to it. */
1646 && !node->in_other_partition
1647 && !node->alias
1648 && !node->clones
1649 && !DECL_EXTERNAL (decl))
1650 {
1651 node->debug ();
1652 internal_error ("failed to reclaim unneeded function");
1653 }
1654 gcc_assert (node->inlined_to
1655 || !gimple_has_body_p (decl)
1656 || node->in_other_partition
1657 || node->clones
1658 || DECL_ARTIFICIAL (decl)
1659 || DECL_EXTERNAL (decl));
1660
1661 }
1662
1663 }
1664 if (flag_checking && check_same_comdat_groups)
1665 FOR_EACH_FUNCTION (node)
1666 if (node->same_comdat_group && !node->process)
1667 {
1668 tree decl = node->decl;
1669 if (!node->inlined_to
1670 && gimple_has_body_p (decl)
1671 /* FIXME: in an ltrans unit when the offline copy is outside a
1672 partition but inline copies are inside a partition, we can
1673 end up not removing the body since we no longer have an
1674 analyzed node pointing to it. */
1675 && !node->in_other_partition
1676 && !node->clones
1677 && !DECL_EXTERNAL (decl))
1678 {
1679 node->debug ();
1680 internal_error ("failed to reclaim unneeded function in same "
1681 "comdat group");
1682 }
1683 }
1684 }
1685
1686 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1687 in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA form.
1688
1689 Set current_function_decl and cfun to the newly constructed empty function body.
1690 Return the basic block in the function body. */
1691
1692 basic_block
1693 init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
1694 {
1695 basic_block bb;
1696 edge e;
1697
1698 current_function_decl = decl;
1699 allocate_struct_function (decl, false);
1700 gimple_register_cfg_hooks ();
1701 init_empty_tree_cfg ();
1702 init_tree_ssa (cfun);
1703
1704 if (in_ssa)
1705 {
1706 init_ssa_operands (cfun);
1707 cfun->gimple_df->in_ssa_p = true;
1708 cfun->curr_properties |= PROP_ssa;
1709 }
1710
1711 DECL_INITIAL (decl) = make_node (BLOCK);
1712 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1713
1714 DECL_SAVED_TREE (decl) = error_mark_node;
1715 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1716 | PROP_cfg | PROP_loops);
1717
1718 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1719 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1720 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1721
1722 /* Create BB for body of the function and connect it properly. */
1723 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1724 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1725 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1726 bb->count = count;
1727 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1728 e->probability = profile_probability::always ();
1729 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1730 e->probability = profile_probability::always ();
1731 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1732
1733 return bb;
1734 }
1735
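/* Illustrative usage sketch only (not part of the driver): a caller that
   needs a fresh lowered body could do roughly

     basic_block bb
       = init_lowered_empty_function (fndecl, true,
				      profile_count::uninitialized ());
     gimple_stmt_iterator gsi = gsi_start_bb (bb);
     gsi_insert_after (&gsi, gimple_build_return (NULL_TREE), GSI_NEW_STMT);

   which is essentially what expand_thunk below does with its cfg_count;
   FNDECL here stands for any finalized FUNCTION_DECL.  */
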
1736 /* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
1737    VIRTUAL_OFFSET if it is non-null, and by the indirect offset indicated by
1738    INDIRECT_OFFSET if it is nonzero.  THIS_ADJUSTING is nonzero for a
1739    this-adjusting thunk and zero for a result-adjusting thunk.  */
1740
1741 tree
1742 thunk_adjust (gimple_stmt_iterator * bsi,
1743 tree ptr, bool this_adjusting,
1744 HOST_WIDE_INT fixed_offset, tree virtual_offset,
1745 HOST_WIDE_INT indirect_offset)
1746 {
1747 gassign *stmt;
1748 tree ret;
1749
1750 if (this_adjusting
1751 && fixed_offset != 0)
1752 {
1753 stmt = gimple_build_assign
1754 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1755 ptr,
1756 fixed_offset));
1757 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1758 }
1759
1760 if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
1761 {
1762 tree vfunc_type = make_node (FUNCTION_TYPE);
1763 TREE_TYPE (vfunc_type) = integer_type_node;
1764 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1765 layout_type (vfunc_type);
1766
1767 vtable_entry_type = build_pointer_type (vfunc_type);
1768 }
1769
1770 /* If there's a virtual offset, look up that value in the vtable and
1771 adjust the pointer again. */
1772 if (virtual_offset)
1773 {
1774 tree vtabletmp;
1775 tree vtabletmp2;
1776 tree vtabletmp3;
1777
1778 vtabletmp =
1779 create_tmp_reg (build_pointer_type
1780 (build_pointer_type (vtable_entry_type)), "vptr");
1781
1782 /* The vptr is always at offset zero in the object. */
1783 stmt = gimple_build_assign (vtabletmp,
1784 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1785 ptr));
1786 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1787
1788 /* Form the vtable address. */
1789 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1790 "vtableaddr");
1791 stmt = gimple_build_assign (vtabletmp2,
1792 build_simple_mem_ref (vtabletmp));
1793 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1794
1795 /* Find the entry with the vcall offset. */
1796 stmt = gimple_build_assign (vtabletmp2,
1797 fold_build_pointer_plus_loc (input_location,
1798 vtabletmp2,
1799 virtual_offset));
1800 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1801
1802 /* Get the offset itself. */
1803 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1804 "vcalloffset");
1805 stmt = gimple_build_assign (vtabletmp3,
1806 build_simple_mem_ref (vtabletmp2));
1807 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1808
1809 /* Adjust the `this' pointer. */
1810 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1811 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1812 GSI_CONTINUE_LINKING);
1813 }
1814
1815 /* Likewise for an offset that is stored in the object that contains the
1816 vtable. */
1817 if (indirect_offset != 0)
1818 {
1819 tree offset_ptr, offset_tree;
1820
1821 /* Get the address of the offset. */
1822 offset_ptr
1823 = create_tmp_reg (build_pointer_type
1824 (build_pointer_type (vtable_entry_type)),
1825 "offset_ptr");
1826 stmt = gimple_build_assign (offset_ptr,
1827 build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
1828 ptr));
1829 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1830
1831 stmt = gimple_build_assign
1832 (offset_ptr,
1833 fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
1834 indirect_offset));
1835 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1836
1837 /* Get the offset itself. */
1838 offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
1839 "offset");
1840 stmt = gimple_build_assign (offset_tree,
1841 build_simple_mem_ref (offset_ptr));
1842 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1843
1844 /* Adjust the `this' pointer. */
1845 ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
1846 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1847 GSI_CONTINUE_LINKING);
1848 }
1849
1850 if (!this_adjusting
1851 && fixed_offset != 0)
1852 /* Adjust the pointer by the constant. */
1853 {
1854 tree ptrtmp;
1855
1856 if (VAR_P (ptr))
1857 ptrtmp = ptr;
1858 else
1859 {
1860 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1861 stmt = gimple_build_assign (ptrtmp, ptr);
1862 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1863 }
1864 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1865 ptrtmp, fixed_offset);
1866 }
1867
1868 /* Emit the statement and gimplify the adjustment expression. */
1869 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1870 stmt = gimple_build_assign (ret, ptr);
1871 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1872
1873 return ret;
1874 }
1875
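/* For illustration only (a rough source-level sketch, not literal output):
   for a this-adjusting thunk with both offsets the sequence built above is
   approximately

     this = (char *) this + FIXED_OFFSET;
     this = (char *) this + *(ptrdiff_t *) (*(char **) this + VIRTUAL_OFFSET);

   i.e. a constant adjustment followed by a vcall-offset load through the
   vtable pointer stored at offset zero in the object.  */
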
1876 /* Expand the thunk to GIMPLE if possible.
1877    When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1878    no assembler is produced.
1879    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1880    thunks that are not lowered.  */
1881
1882 bool
1883 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1884 {
1885 bool this_adjusting = thunk.this_adjusting;
1886 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1887 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1888 HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1889 tree virtual_offset = NULL;
1890 tree alias = callees->callee->decl;
1891 tree thunk_fndecl = decl;
1892 tree a;
1893
1894 if (!force_gimple_thunk
1895 && this_adjusting
1896 && indirect_offset == 0
1897 && !DECL_EXTERNAL (alias)
1898 && !DECL_STATIC_CHAIN (alias)
1899 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1900 virtual_value, alias))
1901 {
1902 tree fn_block;
1903 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1904
1905 if (!output_asm_thunks)
1906 {
1907 analyzed = true;
1908 return false;
1909 }
1910
1911 if (in_lto_p)
1912 get_untransformed_body ();
1913 a = DECL_ARGUMENTS (thunk_fndecl);
1914
1915 current_function_decl = thunk_fndecl;
1916
1917 /* Ensure thunks are emitted in their correct sections. */
1918 resolve_unique_section (thunk_fndecl, 0,
1919 flag_function_sections);
1920
1921 DECL_RESULT (thunk_fndecl)
1922 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1923 RESULT_DECL, 0, restype);
1924 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1925
1926 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1927 create one. */
1928 fn_block = make_node (BLOCK);
1929 BLOCK_VARS (fn_block) = a;
1930 DECL_INITIAL (thunk_fndecl) = fn_block;
1931 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1932 allocate_struct_function (thunk_fndecl, false);
1933 init_function_start (thunk_fndecl);
1934 cfun->is_thunk = 1;
1935 insn_locations_init ();
1936 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1937 prologue_location = curr_insn_location ();
1938
1939 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1940 fixed_offset, virtual_value, alias);
1941
1942 insn_locations_finalize ();
1943 init_insn_lengths ();
1944 free_after_compilation (cfun);
1945 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1946 thunk.thunk_p = false;
1947 analyzed = false;
1948 }
1949 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1950 {
1951 error ("generic thunk code fails for method %qD which uses %<...%>",
1952 thunk_fndecl);
1953 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1954 analyzed = true;
1955 return false;
1956 }
1957 else
1958 {
1959 tree restype;
1960 basic_block bb, then_bb, else_bb, return_bb;
1961 gimple_stmt_iterator bsi;
1962 int nargs = 0;
1963 tree arg;
1964 int i;
1965 tree resdecl;
1966 tree restmp = NULL;
1967
1968 gcall *call;
1969 greturn *ret;
1970 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1971
1972       /* We may be called from create_wrapper, which releases the body except
1973          for DECL_ARGUMENTS; in that case force_gimple_thunk is true.  */
1974 if (in_lto_p && !force_gimple_thunk)
1975 get_untransformed_body ();
1976
1977 /* We need to force DECL_IGNORED_P when the thunk is created
1978 after early debug was run. */
1979 if (force_gimple_thunk)
1980 DECL_IGNORED_P (thunk_fndecl) = 1;
1981
1982 a = DECL_ARGUMENTS (thunk_fndecl);
1983
1984 current_function_decl = thunk_fndecl;
1985
1986 /* Ensure thunks are emitted in their correct sections. */
1987 resolve_unique_section (thunk_fndecl, 0,
1988 flag_function_sections);
1989
1990 bitmap_obstack_initialize (NULL);
1991
1992 if (thunk.virtual_offset_p)
1993 virtual_offset = size_int (virtual_value);
1994
1995 /* Build the return declaration for the function. */
1996 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1997 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1998 {
1999 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
2000 DECL_ARTIFICIAL (resdecl) = 1;
2001 DECL_IGNORED_P (resdecl) = 1;
2002 DECL_CONTEXT (resdecl) = thunk_fndecl;
2003 DECL_RESULT (thunk_fndecl) = resdecl;
2004 }
2005 else
2006 resdecl = DECL_RESULT (thunk_fndecl);
2007
2008 profile_count cfg_count = count;
2009 if (!cfg_count.initialized_p ())
2010 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
2011
2012 bb = then_bb = else_bb = return_bb
2013 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
2014
2015 bsi = gsi_start_bb (bb);
2016
2017 /* Build call to the function being thunked. */
2018 if (!VOID_TYPE_P (restype)
2019 && (!alias_is_noreturn
2020 || TREE_ADDRESSABLE (restype)
2021 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
2022 {
2023 if (DECL_BY_REFERENCE (resdecl))
2024 {
2025 restmp = gimple_fold_indirect_ref (resdecl);
2026 if (!restmp)
2027 restmp = build2 (MEM_REF,
2028 TREE_TYPE (TREE_TYPE (resdecl)),
2029 resdecl,
2030 build_int_cst (TREE_TYPE (resdecl), 0));
2031 }
2032 else if (!is_gimple_reg_type (restype))
2033 {
2034 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
2035 {
2036 restmp = resdecl;
2037
2038 if (VAR_P (restmp))
2039 {
2040 add_local_decl (cfun, restmp);
2041 BLOCK_VARS (DECL_INITIAL (current_function_decl))
2042 = restmp;
2043 }
2044 }
2045 else
2046 restmp = create_tmp_var (restype, "retval");
2047 }
2048 else
2049 restmp = create_tmp_reg (restype, "retval");
2050 }
2051
2052 for (arg = a; arg; arg = DECL_CHAIN (arg))
2053 nargs++;
2054 auto_vec<tree> vargs (nargs);
2055 i = 0;
2056 arg = a;
2057 if (this_adjusting)
2058 {
2059 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
2060 virtual_offset, indirect_offset));
2061 arg = DECL_CHAIN (a);
2062 i = 1;
2063 }
2064
2065 if (nargs)
2066 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
2067 {
2068 tree tmp = arg;
2069 DECL_NOT_GIMPLE_REG_P (arg) = 0;
2070 if (!is_gimple_val (arg))
2071 {
2072 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
2073 (TREE_TYPE (arg)), "arg");
2074 gimple *stmt = gimple_build_assign (tmp, arg);
2075 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2076 }
2077 vargs.quick_push (tmp);
2078 }
2079 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
2080 callees->call_stmt = call;
2081 gimple_call_set_from_thunk (call, true);
2082 if (DECL_STATIC_CHAIN (alias))
2083 {
2084 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
2085 tree type = TREE_TYPE (p);
2086 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
2087 PARM_DECL, create_tmp_var_name ("CHAIN"),
2088 type);
2089 DECL_ARTIFICIAL (decl) = 1;
2090 DECL_IGNORED_P (decl) = 1;
2091 TREE_USED (decl) = 1;
2092 DECL_CONTEXT (decl) = thunk_fndecl;
2093 DECL_ARG_TYPE (decl) = type;
2094 TREE_READONLY (decl) = 1;
2095
2096 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
2097 sf->static_chain_decl = decl;
2098
2099 gimple_call_set_chain (call, decl);
2100 }
2101
2102       /* Return-slot optimization is always possible and is in fact required for
2103          returning values with DECL_BY_REFERENCE.  */
2104 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2105 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2106 || DECL_BY_REFERENCE (resdecl)))
2107 gimple_call_set_return_slot_opt (call, true);
2108
2109 if (restmp)
2110 {
2111 gimple_call_set_lhs (call, restmp);
2112 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2113 TREE_TYPE (TREE_TYPE (alias))));
2114 }
2115 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2116 if (!alias_is_noreturn)
2117 {
2118 if (restmp && !this_adjusting
2119 && (fixed_offset || virtual_offset))
2120 {
2121 tree true_label = NULL_TREE;
2122
2123 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2124 {
2125 gimple *stmt;
2126 edge e;
2127 /* If the return type is a pointer, we need to
2128 protect against NULL. We know there will be an
2129 adjustment, because that's why we're emitting a
2130 thunk. */
2131 then_bb = create_basic_block (NULL, bb);
2132 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2133 return_bb = create_basic_block (NULL, then_bb);
2134 return_bb->count = cfg_count;
2135 else_bb = create_basic_block (NULL, else_bb);
2136 else_bb->count = cfg_count.apply_scale (1, 16);
2137 add_bb_to_loop (then_bb, bb->loop_father);
2138 add_bb_to_loop (return_bb, bb->loop_father);
2139 add_bb_to_loop (else_bb, bb->loop_father);
2140 remove_edge (single_succ_edge (bb));
2141 true_label = gimple_block_label (then_bb);
2142 stmt = gimple_build_cond (NE_EXPR, restmp,
2143 build_zero_cst (TREE_TYPE (restmp)),
2144 NULL_TREE, NULL_TREE);
2145 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2146 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2147 e->probability = profile_probability::guessed_always ()
2148 .apply_scale (1, 16);
2149 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2150 e->probability = profile_probability::guessed_always ()
2151 .apply_scale (1, 16);
2152 make_single_succ_edge (return_bb,
2153 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2154 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2155 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2156 e->probability = profile_probability::always ();
2157 bsi = gsi_last_bb (then_bb);
2158 }
2159
2160 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2161 fixed_offset, virtual_offset,
2162 indirect_offset);
2163 if (true_label)
2164 {
2165 gimple *stmt;
2166 bsi = gsi_last_bb (else_bb);
2167 stmt = gimple_build_assign (restmp,
2168 build_zero_cst (TREE_TYPE (restmp)));
2169 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2170 bsi = gsi_last_bb (return_bb);
2171 }
2172 }
2173 else
2174 {
2175 gimple_call_set_tail (call, true);
2176 cfun->tail_call_marked = true;
2177 }
2178
2179 /* Build return value. */
2180 if (!DECL_BY_REFERENCE (resdecl))
2181 ret = gimple_build_return (restmp);
2182 else
2183 ret = gimple_build_return (resdecl);
2184
2185 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2186 }
2187 else
2188 {
2189 gimple_call_set_tail (call, true);
2190 cfun->tail_call_marked = true;
2191 remove_edge (single_succ_edge (bb));
2192 }
2193
2194 cfun->gimple_df->in_ssa_p = true;
2195 update_max_bb_count ();
2196 profile_status_for_fn (cfun)
2197 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2198 ? PROFILE_READ : PROFILE_GUESSED;
2199 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2200 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2201 delete_unreachable_blocks ();
2202 update_ssa (TODO_update_ssa);
2203 checking_verify_flow_info ();
2204 free_dominance_info (CDI_DOMINATORS);
2205
2206 /* Since we want to emit the thunk, we explicitly mark its name as
2207 referenced. */
2208 thunk.thunk_p = false;
2209 lowered = true;
2210 bitmap_obstack_release (NULL);
2211 }
2212 current_function_decl = NULL;
2213 set_cfun (NULL);
2214 return true;
2215 }
2216
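/* Source-level illustration only (assuming the usual C++ multiple
   inheritance layout):

     struct A { virtual int f (); };
     struct B { virtual int g (); };
     struct C : A, B { int g (); };

   the slot for C::g in the B-in-C vtable is a this-adjusting thunk that
   subtracts the offset of the B subobject and then calls C::g; the routine
   above emits it either through the target mi-thunk hook or as the GIMPLE
   built via thunk_adjust.  */
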
2217 /* Assemble thunks and aliases associated with this node.  */
2218
2219 void
2220 cgraph_node::assemble_thunks_and_aliases (void)
2221 {
2222 cgraph_edge *e;
2223 ipa_ref *ref;
2224
2225 for (e = callers; e;)
2226 if (e->caller->thunk.thunk_p
2227 && !e->caller->inlined_to)
2228 {
2229 cgraph_node *thunk = e->caller;
2230
2231 e = e->next_caller;
2232 thunk->expand_thunk (true, false);
2233 thunk->assemble_thunks_and_aliases ();
2234 }
2235 else
2236 e = e->next_caller;
2237
2238 FOR_EACH_ALIAS (this, ref)
2239 {
2240 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2241 if (!alias->transparent_alias)
2242 {
2243 bool saved_written = TREE_ASM_WRITTEN (decl);
2244
2245 	  /* Force assemble_alias to really output the alias this time instead
2246 	     of buffering it in the alias pairs list.  */
2247 TREE_ASM_WRITTEN (decl) = 1;
2248 if (alias->symver)
2249 do_assemble_symver (alias->decl,
2250 DECL_ASSEMBLER_NAME (decl));
2251 else
2252 do_assemble_alias (alias->decl,
2253 DECL_ASSEMBLER_NAME (decl));
2254 alias->assemble_thunks_and_aliases ();
2255 TREE_ASM_WRITTEN (decl) = saved_written;
2256 }
2257 }
2258 }
2259
2260 /* Expand function specified by node. */
2261
2262 void
2263 cgraph_node::expand (void)
2264 {
2265 location_t saved_loc;
2266
2267   /* We ought not to compile any inline clones.  */
2268 gcc_assert (!inlined_to);
2269
2270 /* __RTL functions are compiled as soon as they are parsed, so don't
2271 do it again. */
2272 if (native_rtl_p ())
2273 return;
2274
2275 announce_function (decl);
2276 process = 0;
2277 gcc_assert (lowered);
2278
2279 /* Initialize the default bitmap obstack. */
2280 bitmap_obstack_initialize (NULL);
2281 get_untransformed_body ();
2282
2283 /* Generate RTL for the body of DECL. */
2284
2285 timevar_push (TV_REST_OF_COMPILATION);
2286
2287 gcc_assert (symtab->global_info_ready);
2288
2289 /* Initialize the RTL code for the function. */
2290 saved_loc = input_location;
2291 input_location = DECL_SOURCE_LOCATION (decl);
2292
2293 gcc_assert (DECL_STRUCT_FUNCTION (decl));
2294 push_cfun (DECL_STRUCT_FUNCTION (decl));
2295 init_function_start (decl);
2296
2297 gimple_register_cfg_hooks ();
2298
2299 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2300
2301 update_ssa (TODO_update_ssa_only_virtuals);
2302 if (ipa_transforms_to_apply.exists ())
2303 execute_all_ipa_transforms (false);
2304
2305 /* Perform all tree transforms and optimizations. */
2306
2307 /* Signal the start of passes. */
2308 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2309
2310 execute_pass_list (cfun, g->get_passes ()->all_passes);
2311
2312 /* Signal the end of passes. */
2313 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2314
2315 bitmap_obstack_release (&reg_obstack);
2316
2317 /* Release the default bitmap obstack. */
2318 bitmap_obstack_release (NULL);
2319
2320 /* If requested, warn about function definitions where the function will
2321 return a value (usually of some struct or union type) which itself will
2322 take up a lot of stack space. */
2323 if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2324 {
2325 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2326
2327 if (ret_type && TYPE_SIZE_UNIT (ret_type)
2328 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2329 && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2330 warn_larger_than_size) > 0)
2331 {
2332 unsigned int size_as_int
2333 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2334
2335 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2336 warning (OPT_Wlarger_than_,
2337 "size of return value of %q+D is %u bytes",
2338 decl, size_as_int);
2339 else
2340 warning (OPT_Wlarger_than_,
2341 "size of return value of %q+D is larger than %wu bytes",
2342 decl, warn_larger_than_size);
2343 }
2344 }
2345
2346 gimple_set_body (decl, NULL);
2347 if (DECL_STRUCT_FUNCTION (decl) == 0)
2348 {
2349 /* Stop pointing to the local nodes about to be freed.
2350 But DECL_INITIAL must remain nonzero so we know this
2351 was an actual function definition. */
2352 if (DECL_INITIAL (decl) != 0)
2353 DECL_INITIAL (decl) = error_mark_node;
2354 }
2355
2356 input_location = saved_loc;
2357
2358 ggc_collect ();
2359 timevar_pop (TV_REST_OF_COMPILATION);
2360
2361   /* Make sure that the back end didn't give up on compiling.  */
2362 gcc_assert (TREE_ASM_WRITTEN (decl));
2363 if (cfun)
2364 pop_cfun ();
2365
2366   /* It would make a lot more sense to output thunks before the function body
2367      to get more forward and fewer backward jumps.  This however would require
2368      solving a problem with comdats.  See PR48668.  Also aliases must come
2369      after the function itself to make one-pass assemblers, like the one on
2370      AIX, happy.  See PR 50689.
2371      FIXME: Perhaps thunks should be moved before the function iff they are
2372      not in comdat groups.  */
2373 assemble_thunks_and_aliases ();
2374 release_body ();
2375 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
2376 points to the dead function body. */
2377 remove_callees ();
2378 remove_all_references ();
2379 }
2380
2381 /* Node comparator responsible for ordering functions by the time they were
2382    first executed (their tp_first_run time stamp).  */
2383
2384 int
2385 tp_first_run_node_cmp (const void *pa, const void *pb)
2386 {
2387 const cgraph_node *a = *(const cgraph_node * const *) pa;
2388 const cgraph_node *b = *(const cgraph_node * const *) pb;
2389 unsigned int tp_first_run_a = a->tp_first_run;
2390 unsigned int tp_first_run_b = b->tp_first_run;
2391
2392 if (!opt_for_fn (a->decl, flag_profile_reorder_functions)
2393 || a->no_reorder)
2394 tp_first_run_a = 0;
2395 if (!opt_for_fn (b->decl, flag_profile_reorder_functions)
2396 || b->no_reorder)
2397 tp_first_run_b = 0;
2398
2399 if (tp_first_run_a == tp_first_run_b)
2400 return a->order - b->order;
2401
2402   /* Functions with a time profile must come before those without one; zero (no profile) wraps to INT_MAX below and thus sorts last.  */
2403 tp_first_run_a = (tp_first_run_a - 1) & INT_MAX;
2404 tp_first_run_b = (tp_first_run_b - 1) & INT_MAX;
2405
2406 return tp_first_run_a - tp_first_run_b;
2407 }
2408
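/* For example (illustrative values only): nodes whose tp_first_run values
   are 3, 1, 0 and 2 sort as 1, 2, 3, 0: the zero entry (no time profile)
   maps to INT_MAX above and therefore comes last, while exact ties fall
   back to the symbol order.  */
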
2409 /* Expand all functions that must be output.
2410
2411    Attempt to topologically sort the nodes so that a function is output only
2412    after all the functions it calls have been assembled; this allows data to
2413    be propagated across the callgraph.  Use a stack to get smaller distance
2414    between a function and its callees (later we may choose to use a more
2415    sophisticated algorithm for function reordering; we will likely want
2416    to use subsections to make the output functions appear in top-down
2417    order).  */
2418
2419 static void
2420 expand_all_functions (void)
2421 {
2422 cgraph_node *node;
2423 cgraph_node **order = XCNEWVEC (cgraph_node *,
2424 symtab->cgraph_count);
2425 cgraph_node **tp_first_run_order = XCNEWVEC (cgraph_node *,
2426 symtab->cgraph_count);
2427 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2428 int order_pos, tp_first_run_order_pos = 0, new_order_pos = 0;
2429 int i;
2430
2431 order_pos = ipa_reverse_postorder (order);
2432 gcc_assert (order_pos == symtab->cgraph_count);
2433
2434   /* The garbage collector may remove inline clones that we eliminate during
2435      optimization, so we must be sure not to reference them.  */
2436 for (i = 0; i < order_pos; i++)
2437 if (order[i]->process)
2438 {
2439 if (order[i]->tp_first_run
2440 && opt_for_fn (order[i]->decl, flag_profile_reorder_functions))
2441 tp_first_run_order[tp_first_run_order_pos++] = order[i];
2442 else
2443 order[new_order_pos++] = order[i];
2444 }
2445
2446   /* First output functions with a time profile, in the specified order.  */
2447 qsort (tp_first_run_order, tp_first_run_order_pos,
2448 sizeof (cgraph_node *), tp_first_run_node_cmp);
2449 for (i = 0; i < tp_first_run_order_pos; i++)
2450 {
2451 node = tp_first_run_order[i];
2452
2453 if (node->process)
2454 {
2455 expanded_func_count++;
2456 profiled_func_count++;
2457
2458 if (symtab->dump_file)
2459 fprintf (symtab->dump_file,
2460 "Time profile order in expand_all_functions:%s:%d\n",
2461 node->dump_asm_name (), node->tp_first_run);
2462 node->process = 0;
2463 node->expand ();
2464 }
2465 }
2466
2467   /* Output functions in RPO so callees get optimized before callers.  This
2468      allows ipa-ra and other propagators to work.
2469      FIXME: This is far from an optimal code layout.  */
2470 for (i = new_order_pos - 1; i >= 0; i--)
2471 {
2472 node = order[i];
2473
2474 if (node->process)
2475 {
2476 expanded_func_count++;
2477 node->process = 0;
2478 node->expand ();
2479 }
2480 }
2481
2482 if (dump_file)
2483 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2484 main_input_filename, profiled_func_count, expanded_func_count);
2485
2486 if (symtab->dump_file && tp_first_run_order_pos)
2487 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2488 profiled_func_count, expanded_func_count);
2489
2490 symtab->process_new_functions ();
2491 free_gimplify_stack ();
2492 delete ipa_saved_clone_sources;
2493 ipa_saved_clone_sources = NULL;
2494 free (order);
2495 free (tp_first_run_order);
2496 }
2497
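/* Why reverse postorder matters here, on a tiny example (illustration only):
   given

     static int callee (int x) { return x + 1; }
     int caller (int x) { return callee (x); }

   CALLEE is expanded before CALLER, so by the time CALLER is compiled
   ipa-ra already knows which hard registers CALLEE clobbers.  */
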
2498 /* This is used to sort the node types by the cgraph order number. */
2499
2500 enum cgraph_order_sort_kind
2501 {
2502 ORDER_FUNCTION,
2503 ORDER_VAR,
2504 ORDER_VAR_UNDEF,
2505 ORDER_ASM
2506 };
2507
2508 struct cgraph_order_sort
2509 {
2510 /* Construct from a cgraph_node. */
2511 cgraph_order_sort (cgraph_node *node)
2512 : kind (ORDER_FUNCTION), order (node->order)
2513 {
2514 u.f = node;
2515 }
2516
2517 /* Construct from a varpool_node. */
2518 cgraph_order_sort (varpool_node *node)
2519 : kind (node->definition ? ORDER_VAR : ORDER_VAR_UNDEF), order (node->order)
2520 {
2521 u.v = node;
2522 }
2523
2524   /* Construct from an asm_node.  */
2525 cgraph_order_sort (asm_node *node)
2526 : kind (ORDER_ASM), order (node->order)
2527 {
2528 u.a = node;
2529 }
2530
2531   /* Assemble the cgraph_order_sort entry according to its kind.  */
2532 void process ();
2533
2534 enum cgraph_order_sort_kind kind;
2535 union
2536 {
2537 cgraph_node *f;
2538 varpool_node *v;
2539 asm_node *a;
2540 } u;
2541 int order;
2542 };
2543
2544 /* Assemble the cgraph_order_sort entry according to its kind.  */
2545
2546 void
2547 cgraph_order_sort::process ()
2548 {
2549 switch (kind)
2550 {
2551 case ORDER_FUNCTION:
2552 u.f->process = 0;
2553 u.f->expand ();
2554 break;
2555 case ORDER_VAR:
2556 u.v->assemble_decl ();
2557 break;
2558 case ORDER_VAR_UNDEF:
2559 assemble_undefined_decl (u.v->decl);
2560 break;
2561 case ORDER_ASM:
2562 assemble_asm (u.a->asm_str);
2563 break;
2564 default:
2565 gcc_unreachable ();
2566 }
2567 }
2568
2569 /* Compare cgraph_order_sort by order. */
2570
2571 static int
2572 cgraph_order_cmp (const void *a_p, const void *b_p)
2573 {
2574 const cgraph_order_sort *nodea = (const cgraph_order_sort *)a_p;
2575 const cgraph_order_sort *nodeb = (const cgraph_order_sort *)b_p;
2576
2577 return nodea->order - nodeb->order;
2578 }
2579
2580 /* Output all functions, variables, and asm statements according to their
2581    order fields, which record the order in which they appeared in the
2582    source file.  This implements -fno-toplevel-reorder.  In this mode we
2583    may output functions and variables which don't really need to be
2584    output.  */
2585
2586 static void
2587 output_in_order (void)
2588 {
2589 int i;
2590 cgraph_node *cnode;
2591 varpool_node *vnode;
2592 asm_node *anode;
2593 auto_vec<cgraph_order_sort> nodes;
2594 cgraph_order_sort *node;
2595
2596 FOR_EACH_DEFINED_FUNCTION (cnode)
2597 if (cnode->process && !cnode->thunk.thunk_p
2598 && !cnode->alias && cnode->no_reorder)
2599 nodes.safe_push (cgraph_order_sort (cnode));
2600
2601 /* There is a similar loop in symbol_table::output_variables.
2602 Please keep them in sync. */
2603 FOR_EACH_VARIABLE (vnode)
2604 if (vnode->no_reorder
2605 && !DECL_HARD_REGISTER (vnode->decl)
2606 && !DECL_HAS_VALUE_EXPR_P (vnode->decl))
2607 nodes.safe_push (cgraph_order_sort (vnode));
2608
2609 for (anode = symtab->first_asm_symbol (); anode; anode = anode->next)
2610 nodes.safe_push (cgraph_order_sort (anode));
2611
2612 /* Sort nodes by order. */
2613 nodes.qsort (cgraph_order_cmp);
2614
2615 /* In toplevel reorder mode we output all statics; mark them as needed. */
2616 FOR_EACH_VEC_ELT (nodes, i, node)
2617 if (node->kind == ORDER_VAR)
2618 node->u.v->finalize_named_section_flags ();
2619
2620 FOR_EACH_VEC_ELT (nodes, i, node)
2621 node->process ();
2622
2623 symtab->clear_asm_symbols ();
2624 }
2625
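/* User-level illustration of -fno-toplevel-reorder (a sketch; exact assembly
   depends on the target): given

     int a = 1;
     asm ("# marker");
     int f (void) { return a; }
     int b = 2;

   the definitions of a, the toplevel asm, f and b are emitted in this
   textual order instead of whatever order the optimizers would prefer.  */
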
2626 static void
2627 ipa_passes (void)
2628 {
2629 gcc::pass_manager *passes = g->get_passes ();
2630
2631 set_cfun (NULL);
2632 current_function_decl = NULL;
2633 gimple_register_cfg_hooks ();
2634 bitmap_obstack_initialize (NULL);
2635
2636 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2637
2638 if (!in_lto_p)
2639 {
2640 execute_ipa_pass_list (passes->all_small_ipa_passes);
2641 if (seen_error ())
2642 return;
2643 }
2644
2645   /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2646      devirtualization and other changes after which removal can iterate.  */
2647 symtab->remove_unreachable_nodes (symtab->dump_file);
2648
2649 /* If pass_all_early_optimizations was not scheduled, the state of
2650 the cgraph will not be properly updated. Update it now. */
2651 if (symtab->state < IPA_SSA)
2652 symtab->state = IPA_SSA;
2653
2654 if (!in_lto_p)
2655 {
2656 /* Generate coverage variables and constructors. */
2657 coverage_finish ();
2658
2659 /* Process new functions added. */
2660 set_cfun (NULL);
2661 current_function_decl = NULL;
2662 symtab->process_new_functions ();
2663
2664 execute_ipa_summary_passes
2665 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2666 }
2667
2668 /* Some targets need to handle LTO assembler output specially. */
2669 if (flag_generate_lto || flag_generate_offload)
2670 targetm.asm_out.lto_start ();
2671
2672 if (!in_lto_p
2673 || flag_incremental_link == INCREMENTAL_LINK_LTO)
2674 {
2675 if (!quiet_flag)
2676 fprintf (stderr, "Streaming LTO\n");
2677 if (g->have_offload)
2678 {
2679 section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2680 lto_stream_offload_p = true;
2681 ipa_write_summaries ();
2682 lto_stream_offload_p = false;
2683 }
2684 if (flag_lto)
2685 {
2686 section_name_prefix = LTO_SECTION_NAME_PREFIX;
2687 lto_stream_offload_p = false;
2688 ipa_write_summaries ();
2689 }
2690 }
2691
2692 if (flag_generate_lto || flag_generate_offload)
2693 targetm.asm_out.lto_end ();
2694
2695 if (!flag_ltrans
2696 && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
2697 || !flag_lto || flag_fat_lto_objects))
2698 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2699 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2700
2701 bitmap_obstack_release (NULL);
2702 }
2703
2704
2705 /* Return the identifier naming the target of DECL's "alias" attribute.  */
2706
2707 static tree
2708 get_alias_symbol (tree decl)
2709 {
2710 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2711 return get_identifier (TREE_STRING_POINTER
2712 (TREE_VALUE (TREE_VALUE (alias))));
2713 }
2714
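/* Example of the attribute parsed above (illustration only):

     void impl (void) { }
     void entry (void) __attribute__ ((alias ("impl")));

   for ENTRY's decl, get_alias_symbol returns the identifier "impl".  */
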
2715
2716 /* Weakrefs may be associated with external decls and thus not output
2717    at expansion time.  Emit all necessary aliases.  */
2718
2719 void
2720 symbol_table::output_weakrefs (void)
2721 {
2722 symtab_node *node;
2723 FOR_EACH_SYMBOL (node)
2724 if (node->alias
2725 && !TREE_ASM_WRITTEN (node->decl)
2726 && node->weakref)
2727 {
2728 tree target;
2729
2730 	/* Weakrefs are special in that they do not require the target to be
2731 	   defined in the current compilation unit, so it is a bit hard to work
2732 	   out what we want to alias.
2733 	   When the alias target is defined, we need to fetch it from the symtab
2734 	   reference; otherwise it is pointed to by alias_target.  */
2735 if (node->alias_target)
2736 target = (DECL_P (node->alias_target)
2737 ? DECL_ASSEMBLER_NAME (node->alias_target)
2738 : node->alias_target);
2739 else if (node->analyzed)
2740 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2741 else
2742 {
2743 gcc_unreachable ();
2744 target = get_alias_symbol (node->decl);
2745 }
2746 do_assemble_alias (node->decl, target);
2747 }
2748 }
2749
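/* Example of a weakref this routine may have to emit (illustration only):

     static void local_fn (void) __attribute__ ((weakref ("external_fn")));

   external_fn need not be defined in this translation unit, which is why
   the target is recovered from alias_target or the symtab reference above.  */
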
2750 /* Perform simple optimizations based on the callgraph.  */
2751
2752 void
2753 symbol_table::compile (void)
2754 {
2755 if (seen_error ())
2756 return;
2757
2758 symtab_node::checking_verify_symtab_nodes ();
2759
2760 timevar_push (TV_CGRAPHOPT);
2761 if (pre_ipa_mem_report)
2762 dump_memory_report ("Memory consumption before IPA");
2763 if (!quiet_flag)
2764 fprintf (stderr, "Performing interprocedural optimizations\n");
2765 state = IPA;
2766
2767 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2768 if (flag_generate_lto || flag_generate_offload)
2769 lto_streamer_hooks_init ();
2770
2771   /* Don't run the IPA passes if there were any error or sorry messages.  */
2772 if (!seen_error ())
2773 {
2774 timevar_start (TV_CGRAPH_IPA_PASSES);
2775 ipa_passes ();
2776 timevar_stop (TV_CGRAPH_IPA_PASSES);
2777 }
2778 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2779 if (seen_error ()
2780 || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
2781 && flag_lto && !flag_fat_lto_objects))
2782 {
2783 timevar_pop (TV_CGRAPHOPT);
2784 return;
2785 }
2786
2787 global_info_ready = true;
2788 if (dump_file)
2789 {
2790 fprintf (dump_file, "Optimized ");
2791 symtab->dump (dump_file);
2792 }
2793 if (post_ipa_mem_report)
2794 dump_memory_report ("Memory consumption after IPA");
2795 timevar_pop (TV_CGRAPHOPT);
2796
2797 /* Output everything. */
2798 switch_to_section (text_section);
2799 (*debug_hooks->assembly_start) ();
2800 if (!quiet_flag)
2801 fprintf (stderr, "Assembling functions:\n");
2802 symtab_node::checking_verify_symtab_nodes ();
2803
2804 bitmap_obstack_initialize (NULL);
2805 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2806 bitmap_obstack_release (NULL);
2807 mark_functions_to_output ();
2808
2809   /* When weakref support is missing, we automatically translate all
2810      references to a weakref symbol into references to its ultimate alias
2811      target.  The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS
2812      and TREE_CHAIN.
2813
2814      Set up this mapping before we output any assembler, but only once we are
2815      sure that all symbol renaming is done.
2816
2817      FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2818      level by physically rewriting the IL.  At the moment we can only redirect
2819      calls, so we need infrastructure for renaming references as well.  */
2820 #ifndef ASM_OUTPUT_WEAKREF
2821 symtab_node *node;
2822
2823 FOR_EACH_SYMBOL (node)
2824 if (node->alias
2825 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2826 {
2827 IDENTIFIER_TRANSPARENT_ALIAS
2828 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2829 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2830 = (node->alias_target ? node->alias_target
2831 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2832 }
2833 #endif
2834
2835 state = EXPANSION;
2836
2837   /* First output asm statements and anything that must stay in its original
2838      order.  The process flag is cleared for these nodes, so we skip them later.  */
2839 output_in_order ();
2840
2841 timevar_start (TV_CGRAPH_FUNC_EXPANSION);
2842 expand_all_functions ();
2843 timevar_stop (TV_CGRAPH_FUNC_EXPANSION);
2844
2845 output_variables ();
2846
2847 process_new_functions ();
2848 state = FINISHED;
2849 output_weakrefs ();
2850
2851 if (dump_file)
2852 {
2853 fprintf (dump_file, "\nFinal ");
2854 symtab->dump (dump_file);
2855 }
2856 if (!flag_checking)
2857 return;
2858 symtab_node::verify_symtab_nodes ();
2859 /* Double check that all inline clones are gone and that all
2860 function bodies have been released from memory. */
2861 if (!seen_error ())
2862 {
2863 cgraph_node *node;
2864 bool error_found = false;
2865
2866 FOR_EACH_DEFINED_FUNCTION (node)
2867 if (node->inlined_to
2868 || gimple_has_body_p (node->decl))
2869 {
2870 error_found = true;
2871 node->debug ();
2872 }
2873 if (error_found)
2874 internal_error ("nodes with unreleased memory found");
2875 }
2876 }
2877
2878 /* Earlydebug dump file, flags, and number. */
2879
2880 static int debuginfo_early_dump_nr;
2881 static FILE *debuginfo_early_dump_file;
2882 static dump_flags_t debuginfo_early_dump_flags;
2883
2884 /* Debug dump file, flags, and number. */
2885
2886 static int debuginfo_dump_nr;
2887 static FILE *debuginfo_dump_file;
2888 static dump_flags_t debuginfo_dump_flags;
2889
2890 /* Register the debug and earlydebug dump files. */
2891
2892 void
2893 debuginfo_early_init (void)
2894 {
2895 gcc::dump_manager *dumps = g->get_dumps ();
2896 debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2897 "earlydebug", DK_tree,
2898 OPTGROUP_NONE,
2899 false);
2900 debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2901 "debug", DK_tree,
2902 OPTGROUP_NONE,
2903 false);
2904 }
2905
2906 /* Initialize the debug and earlydebug dump files. */
2907
2908 void
2909 debuginfo_init (void)
2910 {
2911 gcc::dump_manager *dumps = g->get_dumps ();
2912 debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2913 debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2914 debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2915 debuginfo_early_dump_flags
2916 = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2917 }
2918
2919 /* Finalize the debug and earlydebug dump files. */
2920
2921 void
2922 debuginfo_fini (void)
2923 {
2924 if (debuginfo_dump_file)
2925 dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2926 if (debuginfo_early_dump_file)
2927 dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2928 }
2929
2930 /* Set dump_file to the debug dump file. */
2931
2932 void
2933 debuginfo_start (void)
2934 {
2935 set_dump_file (debuginfo_dump_file);
2936 }
2937
2938 /* Undo setting dump_file to the debug dump file. */
2939
2940 void
2941 debuginfo_stop (void)
2942 {
2943 set_dump_file (NULL);
2944 }
2945
2946 /* Set dump_file to the earlydebug dump file. */
2947
2948 void
2949 debuginfo_early_start (void)
2950 {
2951 set_dump_file (debuginfo_early_dump_file);
2952 }
2953
2954 /* Undo setting dump_file to the earlydebug dump file. */
2955
2956 void
2957 debuginfo_early_stop (void)
2958 {
2959 set_dump_file (NULL);
2960 }
2961
2962 /* Analyze the whole compilation unit once it is parsed completely. */
2963
2964 void
2965 symbol_table::finalize_compilation_unit (void)
2966 {
2967 timevar_push (TV_CGRAPH);
2968
2969 /* If we're here there's no current function anymore. Some frontends
2970 are lazy in clearing these. */
2971 current_function_decl = NULL;
2972 set_cfun (NULL);
2973
2974   /* Do not skip analyzing the functions if there were errors; otherwise we
2975      would miss diagnostics for subsequent functions.  */
2976
2977 /* Emit size functions we didn't inline. */
2978 finalize_size_functions ();
2979
2980 /* Mark alias targets necessary and emit diagnostics. */
2981 handle_alias_pairs ();
2982
2983 if (!quiet_flag)
2984 {
2985 fprintf (stderr, "\nAnalyzing compilation unit\n");
2986 fflush (stderr);
2987 }
2988
2989 if (flag_dump_passes)
2990 dump_passes ();
2991
2992 /* Gimplify and lower all functions, compute reachability and
2993 remove unreachable nodes. */
2994 analyze_functions (/*first_time=*/true);
2995
2996 /* Mark alias targets necessary and emit diagnostics. */
2997 handle_alias_pairs ();
2998
2999 /* Gimplify and lower thunks. */
3000 analyze_functions (/*first_time=*/false);
3001
3002 /* All nested functions should be lowered now. */
3003 nested_function_info::release ();
3004
3005 /* Offloading requires LTO infrastructure. */
3006 if (!in_lto_p && g->have_offload)
3007 flag_generate_offload = 1;
3008
3009 if (!seen_error ())
3010 {
3011 /* Give the frontends the chance to emit early debug based on
3012 what is still reachable in the TU. */
3013 (*lang_hooks.finalize_early_debug) ();
3014
3015 /* Clean up anything that needs cleaning up after initial debug
3016 generation. */
3017 debuginfo_early_start ();
3018 (*debug_hooks->early_finish) (main_input_filename);
3019 debuginfo_early_stop ();
3020 }
3021
3022 /* Finally drive the pass manager. */
3023 compile ();
3024
3025 timevar_pop (TV_CGRAPH);
3026 }
3027
3028 /* Reset all state within cgraphunit.c so that we can rerun the compiler
3029 within the same process. For use by toplev::finalize. */
3030
3031 void
3032 cgraphunit_c_finalize (void)
3033 {
3034 gcc_assert (cgraph_new_nodes.length () == 0);
3035 cgraph_new_nodes.truncate (0);
3036
3037 vtable_entry_type = NULL;
3038 queued_nodes = &symtab_terminator;
3039
3040 first_analyzed = NULL;
3041 first_analyzed_var = NULL;
3042 }
3043
3044 /* Create a wrapper redirecting this cgraph_node to TARGET; a thunk is used
3045    to implement this kind of wrapper method.  */
3046
3047 void
3048 cgraph_node::create_wrapper (cgraph_node *target)
3049 {
3050   /* Preserve DECL_RESULT so we get the right by-reference flag.  */
3051 tree decl_result = DECL_RESULT (decl);
3052
3053   /* Remove the function's body but keep the arguments so they can be
3054      reused by the thunk.  */
3055 release_body (true);
3056 reset ();
3057
3058 DECL_UNINLINABLE (decl) = false;
3059 DECL_RESULT (decl) = decl_result;
3060 DECL_INITIAL (decl) = NULL;
3061 allocate_struct_function (decl, false);
3062 set_cfun (NULL);
3063
3064   /* Turn the alias into a thunk and expand it into its GIMPLE representation.  */
3065 definition = true;
3066
3067 memset (&thunk, 0, sizeof (cgraph_thunk_info));
3068 thunk.thunk_p = true;
3069 create_edge (target, NULL, count);
3070 callees->can_throw_external = !TREE_NOTHROW (target->decl);
3071
3072 tree arguments = DECL_ARGUMENTS (decl);
3073
3074 while (arguments)
3075 {
3076 TREE_ADDRESSABLE (arguments) = false;
3077 arguments = TREE_CHAIN (arguments);
3078 }
3079
3080 expand_thunk (false, true);
3081
3082 /* Inline summary set-up. */
3083 analyze ();
3084 inline_analyze_function (this);
3085 }
3086
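/* Conceptually (a sketch, not literal output) the node now behaves like

     T wrapper (args) { return target (args); }

   i.e. a non-adjusting thunk that simply forwards its arguments, which the
   inliner can then analyze like any other small function.  */
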
3087 #include "gt-cgraphunit.h"