]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
[Ada] Improved support for aspect alignment in CCG
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31 and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-fnsummary.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "lto-section-names.h"
206 #include "stringpool.h"
207 #include "attribs.h"
208 #include "ipa-inline.h"
209 #include "omp-offload.h"
210
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations for the expansion/output helpers defined later
   in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
222
223 /* Return true if this symbol is a function from the C frontend specified
224 directly in RTL form (with "__RTL"). */
225
226 bool
227 symtab_node::native_rtl_p () const
228 {
229 if (TREE_CODE (decl) != FUNCTION_DECL)
230 return false;
231 if (!DECL_STRUCT_FUNCTION (decl))
232 return false;
233 return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
234 }
235
236 /* Determine if symbol declaration is needed. That is, visible to something
237 either outside this translation unit, something magic in the system
238 configury */
239 bool
240 symtab_node::needed_p (void)
241 {
242 /* Double check that no one output the function into assembly file
243 early. */
244 if (!native_rtl_p ())
245 gcc_checking_assert
246 (!DECL_ASSEMBLER_NAME_SET_P (decl)
247 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
248
249 if (!definition)
250 return false;
251
252 if (DECL_EXTERNAL (decl))
253 return false;
254
255 /* If the user told us it is used, then it must be so. */
256 if (force_output)
257 return true;
258
259 /* ABI forced symbols are needed when they are external. */
260 if (forced_by_abi && TREE_PUBLIC (decl))
261 return true;
262
263 /* Keep constructors, destructors and virtual functions. */
264 if (TREE_CODE (decl) == FUNCTION_DECL
265 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
266 return true;
267
268 /* Externally visible variables must be output. The exception is
269 COMDAT variables that must be output only when they are needed. */
270 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
271 return true;
272
273 return false;
274 }
275
/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  The queue is threaded through symtab_node::aux; the dedicated
   terminator node marks the end so a NULL aux means "not queued".  */

static symtab_node symtab_terminator (SYMTAB_SYMBOL);
static symtab_node *queued_nodes = &symtab_terminator;
281
282 /* Add NODE to queue starting at QUEUED_NODES.
283 The queue is linked via AUX pointers and terminated by pointer to 1. */
284
285 static void
286 enqueue_node (symtab_node *node)
287 {
288 if (node->aux)
289 return;
290 gcc_checking_assert (queued_nodes);
291 node->aux = queued_nodes;
292 queued_nodes = node;
293 }
294
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  The
   work done per function depends on how far compilation has progressed
   (this->state).  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  Hence length () is re-read each
     iteration rather than cached.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization has already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Functions added after the SSA transition must be brought into
	     SSA form by running the early local passes on them.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    {
	      bool summaried_computed = ipa_fn_summaries != NULL;
	      g->get_passes ()->execute_early_local_passes ();
	      /* Early passes compute inline parameters to do inlining
		 and splitting.  This is redundant for functions added late.
		 Just throw away whatever it did.  */
	      if (!summaried_computed)
		{
		  ipa_free_fn_summary ();
		  ipa_free_size_summary ();
		}
	    }
	  else if (ipa_fn_summaries != NULL)
	    compute_fn_summary (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  cgraph_new_nodes.release ();
}
374
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with the new body so in unit at a time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.
     Clear inlining/alias state and the RTL summary accumulated for the
     previous body.  */
  inlined_to = NULL;
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  transparent_alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  /* Drop outgoing edges and references; they described the old body.  */
  remove_callees ();
  remove_all_references ();
}
408
409 /* Return true when there are references to the node. INCLUDE_SELF is
410 true if a self reference counts as a reference. */
411
412 bool
413 symtab_node::referred_to_p (bool include_self)
414 {
415 ipa_ref *ref = NULL;
416
417 /* See if there are any references at all. */
418 if (iterate_referring (0, ref))
419 return true;
420 /* For functions check also calls. */
421 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
422 if (cn && cn->callers)
423 {
424 if (include_self)
425 return true;
426 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
427 if (e->caller != this)
428 return true;
429 }
430 return false;
431 }
432
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  /* Redefinition: discard the state derived from the previous body
     (see cgraph_node::reset).  */
  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      node->reset ();
      node->redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  /* A CFG already built means the front end handed us lowered GIMPLE.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (!flag_toplevel_reorder)
    node->no_reorder = true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* __RTL functions were already output as soon as they were parsed (due
     to the large amount of global state in the backend).
     Mark such functions as "force_output" to reflect the fact that they
     will be in the asm file when considering the symbols they reference.
     The attempt to output them later on will bail out immediately.  */
  if (node->native_rtl_p ())
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions
	|| node->no_reorder)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  if (!no_collect)
    ggc_collect ();

  /* During unit construction, queue the node for analysis as soon as it
     is known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
502
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.  LOWERED says whether the body is already lowered GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;

  if (dump_file)
    {
      struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
      const char *function_type = ((gimple_has_body_p (fndecl))
				   ? (lowered
				      ? (gimple_in_ssa_p (fn)
					 ? "ssa gimple"
					 : "low gimple")
				      : "high gimple")
				   : "to-be-gimplified");
      fprintf (dump_file,
	       "Added new %s function %s to callgraph\n",
	       function_type,
	       fndecl_name (fndecl));
    }

  switch (symtab->state)
    {
    case PARSING:
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local = false;
      node->definition = true;
      /* Assume reachable/address-taken; see the function comment.  */
      node->force_output = true;
      if (TREE_PUBLIC (fndecl))
	node->externally_visible = true;
      /* During expansion the lowering passes will not run again, so a
	 still-high body must be lowered here by hand.  */
      if (!lowered && symtab->state == EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
607
/* Analyze the function scheduled to be output: gimplify and lower an
   ordinary body, expand a thunk, resolve an alias, or generate a
   multi-versioned dispatcher body.  Sets this->analyzed on success.  */
void
cgraph_node::analyze (void)
{
  /* __RTL functions need no gimple-level analysis at all.  */
  if (native_rtl_p ())
    {
      analyzed = true;
      return;
    }

  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      cgraph_node *t = cgraph_node::get (thunk.alias);

      create_edge (t, NULL, t->count);
      callees->can_throw_external = !TREE_NOTHROW (t->decl);
      /* Target code in expand_thunk may need the thunk's target
	 to be analyzed, so recurse here.  */
      if (!t->analyzed && t->definition)
	t->analyze ();
      /* Follow one level of alias to reach the real target as well.  */
      if (t->alias)
	{
	  t = t->get_alias_target ();
	  if (!t->analyzed && t->definition)
	    t->analyze ();
	}
      bool ret = expand_thunk (false, false);
      thunk.alias = NULL;
      /* NOTE(review): on failure we return without marking analyzed and
	 without restoring input_location — matches historic behavior.  */
      if (!ret)
	return;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target), transparent_alias);
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_needed (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
696
697 /* C++ frontend produce same body aliases all over the place, even before PCH
698 gets streamed out. It relies on us linking the aliases with their function
699 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
700 first produce aliases without links, but once C++ FE is sure he won't stream
701 PCH we build the links via this function. */
702
703 void
704 symbol_table::process_same_body_aliases (void)
705 {
706 symtab_node *node;
707 FOR_EACH_SYMBOL (node)
708 if (node->cpp_implicit_alias && !node->analyzed)
709 node->resolve_alias
710 (VAR_P (node->alias_target)
711 ? (symtab_node *)varpool_node::get_create (node->alias_target)
712 : (symtab_node *)cgraph_node::get_create (node->alias_target));
713 cpp_implicit_aliases_done = true;
714 }
715
/* Process a symver attribute on node N.  Diagnose invalid uses (multiple
   versions, duplicate version names, undefined/common/comdat/weakref/
   non-public/non-default-visibility symbols) and, when valid, create an
   alias symbol table entry carrying the version name.  */

static void
process_symver_attribute (symtab_node *n)
{
  tree value = lookup_attribute ("symver", DECL_ATTRIBUTES (n->decl));

  if (!value)
    return;
  /* Only a single symver attribute per symbol is supported.  */
  if (lookup_attribute ("symver", TREE_CHAIN (value)))
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"multiple versions for one symbol");
      return;
    }
  /* Intern the version string so it can be compared by pointer and used
     as an assembler name.  */
  tree symver = get_identifier_with_length
    (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (value))),
     TREE_STRING_LENGTH (TREE_VALUE (TREE_VALUE (value))));
  symtab_node *def = symtab_node::get_for_asmname (symver);

  if (def)
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"duplicate definition of a symbol version");
      inform (DECL_SOURCE_LOCATION (def->decl),
	      "same version was previously defined here");
      return;
    }
  if (!n->definition)
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"symbol needs to be defined to have a version");
      return;
    }
  if (DECL_COMMON (n->decl))
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"common symbol cannot be versioned");
      return;
    }
  if (DECL_COMDAT (n->decl))
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"comdat symbol cannot be versioned");
      return;
    }
  if (n->weakref)
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"weakref cannot be versioned");
      return;
    }
  if (!TREE_PUBLIC (n->decl))
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"versioned symbol must be public");
      return;
    }
  if (DECL_VISIBILITY (n->decl) != VISIBILITY_DEFAULT)
    {
      error_at (DECL_SOURCE_LOCATION (n->decl),
		"versioned symbol must have default visibility");
      return;
    }

  /* Create new symbol table entry representing the version.  The new
     decl is a copy of the original with the version identifier as its
     assembler name and no body/initializer of its own.  */
  tree new_decl = copy_node (n->decl);

  DECL_INITIAL (new_decl) = NULL_TREE;
  if (TREE_CODE (new_decl) == FUNCTION_DECL)
    DECL_STRUCT_FUNCTION (new_decl) = NULL;
  SET_DECL_ASSEMBLER_NAME (new_decl, symver);
  TREE_PUBLIC (new_decl) = 1;
  DECL_ATTRIBUTES (new_decl) = NULL;

  symtab_node *symver_node = symtab_node::get_create (new_decl);
  symver_node->alias = true;
  symver_node->definition = true;
  symver_node->symver = true;
  symver_node->create_reference (n, IPA_REF_ALIAS, NULL);
  symver_node->analyzed = true;
}
798
799 /* Process attributes common for vars and functions. */
800
801 static void
802 process_common_attributes (symtab_node *node, tree decl)
803 {
804 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
805
806 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
807 {
808 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
809 "%<weakref%> attribute should be accompanied with"
810 " an %<alias%> attribute");
811 DECL_WEAK (decl) = 0;
812 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
813 DECL_ATTRIBUTES (decl));
814 }
815
816 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
817 node->no_reorder = 1;
818 process_symver_attribute (node);
819 }
820
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  Only nodes added since the previous call are
   walked: FIRST and FIRST_VAR mark where the previous walk stopped.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;

      if (node->alias
	  && lookup_attribute ("flatten", DECL_ATTRIBUTES (decl)))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<flatten%> attribute is ignored on aliases");
	}
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && node->definition
	  && (!node->alias || DECL_INITIAL (decl) != error_mark_node))
	{
	  /* NODE->DEFINITION && NODE->ALIAS is nonzero for valid weakref
	     function declarations; DECL_INITIAL is non-null for invalid
	     weakref functions that are also defined.  */
	  warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	  DECL_ATTRIBUTES (decl) = remove_attribute ("alias",
						     DECL_ATTRIBUTES (decl));
	  node->alias = false;
	  node->weakref = false;
	  node->transparent_alias = false;
	}
      else if (lookup_attribute ("alias", DECL_ATTRIBUTES (decl))
	       && node->definition
	       && !node->alias)
	warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		    "%<alias%> attribute ignored"
		    " because function is defined");

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "%<always_inline%> function might not be inlinable");

      process_common_attributes (node, decl);
    }
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* External variables with an initializer are really definitions;
	 finalize them now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
937
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct
   the middle end to output the variable to asm file, if needed or externally
   visible.  DECL must be a static or external variable.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (!flag_toplevel_reorder)
    node->no_reorder = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (node->no_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  /* During unit construction, queue the variable for processing once it
     is known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (node->no_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();
}
976
/* EDGE is an polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; hash_set::add returns true
     if the token was already present, so each list is walked only once.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* Zero possible targets means the call site is provably
	     unreachable; redirect it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		      (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, edge->call_stmt,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->dump_name (),
			       target->dump_name ());
	    }

	  edge = cgraph_edge::make_direct (edge, target);
	  gimple *new_call = cgraph_edge::redirect_call_stmt_to_callee (edge);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file, "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file, new_call, 0, TDF_SLIM);
	    }
	}
    }
}
1056
/* Issue appropriate warnings for the global declaration SNODE->decl:
   "used but never defined" / "declared static but never defined" for
   undefined static functions, and "defined but not used" for unused
   static functions and variables.  */

static void
check_global_declaration (symtab_node *snode)
{
  const char *decl_file;
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_PUBLIC (decl))
    {
      if (TREE_NO_WARNING (decl))
	;
      else if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never "
		 "defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  Note the
     assignment to DECL_FILE inside the condition: -Wunused-const-variable=1
     only warns for constants defined in the main input file.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || (main_input_filename != NULL
			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
			&& filename_cmp (main_input_filename,
					 decl_file) == 0))))
	   && VAR_P (decl)))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (VAR_P (decl) && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}
1126
/* Discover all functions and variables that are trivially needed, and
   analyze them as well as all functions and variables referred to by
   them.  */
/* Heads of the function and variable lists as of the previous run of
   analyze_functions; symbols encountered before reaching these have
   already been processed, which matters when the function is invoked
   more than once per translation unit.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;

/* FIRST_TIME is set to TRUE for the first time we are called for a
   translation unit from finalize_compilation_unit() or false
   otherwise.  */

static void
analyze_functions (bool first_time)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  cgraph_node *first_handled = first_analyzed;
  varpool_node *first_handled_var = first_analyzed_var;
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  symtab->state = CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup cannot happen at a time same body alias is created;
     C++ FE is confused about the COMDAT groups being right.  */
  if (symtab->cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  build_type_inheritance_graph ();

  if (flag_openmp && first_time)
    omp_discover_implicit_declare_target ();

  /* Analysis adds static variables that in turn adds references to new functions.
     So we need to iterate the process until it stabilize.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  Walk only symbols
	 added since the previous invocation (everything up to
	 FIRST_ANALYZED / FIRST_ANALYZED_VAR).  */
      for (node = symtab->first_symbol ();
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (node->needed_p ())
	    {
	      enqueue_node (node);
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, " %s", node->dump_asm_name ());
	      /* NOTE(review): CHANGED was set to true just above, so this
		 guard can never fire; the terminating newline is actually
		 emitted by the "if (changed && symtab->dump_file)" below.
		 Dead code, kept as-is.  */
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      symtab->process_new_functions ();
      first_analyzed_var = symtab->first_variable ();
      first_analyzed = symtab->first_function ();

      if (changed && symtab->dump_file)
	fprintf (symtab->dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all trivially
	 needed symbols and all symbols referred by them.  Nodes are chained
	 through their AUX pointer on the QUEUED_NODES worklist.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create extern inline function
	      and later using weak alias attribute to kill its body.
	      See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      /* Propagate reachability to callees that must be kept.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition
		    && (!DECL_EXTERNAL (edge->callee->decl)
			/* When not optimizing, do not try to analyze extern
			   inline functions.  Doing so is pointless.  */
			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs needs to be preserved.  */
			|| edge->callee->alias
			/* always_inline functions are inlined even at -O0.  */
			|| lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions needs the dispatcher to
			   be produced locally even for extern functions.  */
			|| edge->callee->function_version ()))
		  enqueue_node (edge->callee);
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  cgraph_edge *next;

		  /* Walking may redirect the edge, so fetch NEXT first.  */
		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
		 mark that abstract function so that we don't release its body.
		 The DECL_INITIAL() of that abstract function declaration
		 will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	      /* Preserve a functions function context node.  It will
		 later be needed to output debug info.  */
	      if (tree fn = decl_function_context (decl))
		{
		  cgraph_node *origin_node = cgraph_node::get_create (fn);
		  enqueue_node (origin_node);
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  /* Reaching one member of a comdat group reaches the whole group
	     (except comdat-local members).  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ())
		  enqueue_node (next);
	    }
	  /* Follow IPA references of this node as well.  */
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition
		&& (!DECL_EXTERNAL (ref->referred->decl)
		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
			    && opt_for_fn (ref->referred->decl, optimize))
		    || node->alias
		    || ref->referred->alias)))
	      enqueue_node (ref->referred);
	  symtab->process_new_functions ();
	}
    }
  update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nInitial ");
      symtab->dump (symtab->dump_file);
    }

  if (first_time)
    {
      symtab_node *snode;
      FOR_EACH_SYMBOL (snode)
	check_global_declaration (snode);
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "\nRemoving unused symbols:");

  /* Prune symbols that were never reached by the worklist above
     (their AUX is still NULL) and are not otherwise referred to.  */
  for (node = symtab->first_symbol ();
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      /* For symbols declared locally we clear TREE_READONLY when emitting
	 the constructor (if one is needed).  For external declarations we can
	 not safely assume that the type is readonly because we may be called
	 during its construction.  */
      if (TREE_CODE (node->decl) == VAR_DECL
	  && TYPE_P (TREE_TYPE (node->decl))
	  && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (node->decl))
	  && DECL_EXTERNAL (node->decl))
	TREE_READONLY (node->decl) = 0;
      if (!node->aux && !node->referred_to_p ())
	{
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, " %s", node->dump_name ());

	  /* See if the debugger can use anything before the DECL
	     passes away.  Perhaps it can notice a DECL that is now a
	     constant and can tag the early DIE with an appropriate
	     attribute.

	     Otherwise, this is the last chance the debug_hooks have
	     at looking at optimized away DECLs, since
	     late_global_decl will subsequently be called from the
	     contents of the now pruned symbol table.  */
	  if (VAR_P (node->decl)
	      && !decl_function_context (node->decl))
	    {
	      /* We are reclaiming totally unreachable code and variables
		 so they effectively appear as readonly.  Show that to
		 the debug machinery.  */
	      TREE_READONLY (node->decl) = 1;
	      node->definition = false;
	      (*debug_hooks->late_global_decl) (node->decl);
	    }

	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  /* A definition without a body (and that is not an alias or
	     thunk) is stale; drop it back to a declaration.  */
	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl)
		      || cnode->native_rtl_p ());
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      node->aux = NULL;
    }
  /* Clear the remaining AUX worklist links beyond the range we walked.  */
  for (;node; node = node->next)
    node->aux = NULL;
  first_analyzed = symtab->first_function ();
  first_analyzed_var = symtab->first_variable ();
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nReclaimed ");
      symtab->dump (symtab->dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize assembler name hash, in particular we want to trigger C++
     mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab->symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}
1407
/* Check declaration of the type of ALIAS for compatibility with its TARGET
   (which may be an ifunc resolver) and issue a diagnostic when they are
   not compatible according to language rules (plus a C++ extension for
   non-static member functions).  Emits an error for an ifunc resolver
   with a non-pointer return type, and -Wattribute-alias warnings for
   other mismatches.  */

static void
maybe_diag_incompatible_alias (tree alias, tree target)
{
  tree altype = TREE_TYPE (alias);
  tree targtype = TREE_TYPE (target);

  bool ifunc = cgraph_node::get (alias)->ifunc_resolver;
  tree funcptr = altype;

  if (ifunc)
    {
      /* Handle attribute ifunc first.  */
      if (TREE_CODE (altype) == METHOD_TYPE)
	{
	  /* Set FUNCPTR to the type of the alias target.  If the type
	     is a non-static member function of class C, construct a type
	     of an ordinary function taking C* as the first argument,
	     followed by the member function argument list, and use it
	     instead to check for incompatibility.  This conversion is
	     not defined by the language but an extension provided by
	     G++.  */

	  tree rettype = TREE_TYPE (altype);
	  tree args = TYPE_ARG_TYPES (altype);
	  altype = build_function_type (rettype, args);
	  funcptr = altype;
	}

      /* An ifunc resolver returns a pointer; strip the function type
	 first, then look at its return type.  */
      targtype = TREE_TYPE (targtype);

      if (POINTER_TYPE_P (targtype))
	{
	  targtype = TREE_TYPE (targtype);

	  /* Only issue Wattribute-alias for conversions to void* with
	     -Wextra.  */
	  if (VOID_TYPE_P (targtype) && !extra_warnings)
	    return;

	  /* Proceed to handle incompatible ifunc resolvers below.  */
	}
      else
	{
	  /* Non-pointer return from a resolver is a hard error, not
	     just a warning.  */
	  funcptr = build_pointer_type (funcptr);

	  error_at (DECL_SOURCE_LOCATION (target),
		    "%<ifunc%> resolver for %qD must return %qT",
		    alias, funcptr);
	  inform (DECL_SOURCE_LOCATION (alias),
		  "resolver indirect function declared here");
	  return;
	}
    }

  if ((!FUNC_OR_METHOD_TYPE_P (targtype)
       || (prototype_p (altype)
	   && prototype_p (targtype)
	   && !types_compatible_p (altype, targtype))))
    {
      /* Warn for incompatibilities.  Avoid warning for functions
	 without a prototype to make it possible to declare aliases
	 without knowing the exact type, as libstdc++ does.  */
      if (ifunc)
	{
	  funcptr = build_pointer_type (funcptr);

	  auto_diagnostic_group d;
	  if (warning_at (DECL_SOURCE_LOCATION (target),
			  OPT_Wattribute_alias_,
			  "%<ifunc%> resolver for %qD should return %qT",
			  alias, funcptr))
	    inform (DECL_SOURCE_LOCATION (alias),
		    "resolver indirect function declared here");
	}
      else
	{
	  auto_diagnostic_group d;
	  if (warning_at (DECL_SOURCE_LOCATION (alias),
			  OPT_Wattribute_alias_,
			  "%qD alias between functions of incompatible "
			  "types %qT and %qT", alias, altype, targtype))
	    inform (DECL_SOURCE_LOCATION (target),
		    "aliased declaration here");
	}
    }
}
1499
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  Consumes and frees the global ALIAS_PAIRS vector.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Note: no i++ in the loop header.  Every path through the body calls
     unordered_remove (i), which moves the last element into slot I, so
     the next iteration examines the new occupant of the same slot.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	      node->transparent_alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Function-to-function alias: diagnose mismatches, then record
	 the alias in the callgraph.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  maybe_diag_incompatible_alias (p->decl, target_node->decl);

	  maybe_diag_alias_attributes (p->decl, target_node->decl);

	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Variable-to-variable alias.  */
      else if (VAR_P (p->decl)
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Mixed function/variable aliases are not supported.  */
      else
	{
	  error ("%q+D alias between function and variable is not supported",
		 p->decl);
	  inform (DECL_SOURCE_LOCATION (target_node->decl),
		  "aliased declaration here");

	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1584
1585
/* Figure out what functions we want to assemble.  Sets the PROCESS flag
   on every cgraph node that must be emitted; with checking enabled,
   verifies that all unneeded function bodies were already reclaimed.  */

static void
mark_functions_to_output (void)
{
  bool check_same_comdat_groups = false;
  cgraph_node *node;

  /* No node should be marked for processing before we start.  */
  if (flag_checking)
    FOR_EACH_FUNCTION (node)
      gcc_assert (!node->process);

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* A node may already have been marked via another member of its
	 comdat group; that is the only legitimate way to get here with
	 PROCESS already set.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a comdat group drags in the rest of
	     the group (minus thunks, aliases and comdat-local nodes).  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Defer the reclamation check for comdat members until after
	     the whole list has been marked.  */
	  if (flag_checking)
	    check_same_comdat_groups = true;
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
	  if (flag_checking
	      && !node->inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
	  gcc_assert (node->inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
  if (flag_checking && check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
}
1681
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.
   COUNT is the profile count assigned to the entry, exit and body block.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.  */

basic_block
init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);

  if (in_ssa)
    {
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  /* The back end expects DECL_INITIAL to contain a BLOCK.  */
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;

  /* Mark the body as already consumed; there is no front-end tree.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly:
     ENTRY -> BB -> EXIT, with both edges taken unconditionally.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->probability = profile_probability::always ();
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->probability = profile_probability::always ();
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1731
/* Adjust PTR by the constant FIXED_OFFSET, by the vtable offset indicated by
   VIRTUAL_OFFSET, and by the indirect offset indicated by INDIRECT_OFFSET, if
   it is non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and zero
   for a result adjusting thunk.  Emits the adjustment statements after BSI
   and returns a fresh temporary holding the adjusted pointer.

   For a this-adjusting thunk the fixed offset is applied BEFORE the
   virtual/indirect lookups; for a result-adjusting thunk it is applied
   AFTER them.  */

tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset,
	      HOST_WIDE_INT indirect_offset)
{
  gassign *stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* Lazily build the global VTABLE_ENTRY_TYPE (pointer to a function
     returning int, no args) used to type the vtable accesses below.  */
  if (!vtable_entry_type && (virtual_offset || indirect_offset != 0))
    {
      tree vfunc_type = make_node (FUNCTION_TYPE);
      TREE_TYPE (vfunc_type) = integer_type_node;
      TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
      layout_type (vfunc_type);

      vtable_entry_type = build_pointer_type (vfunc_type);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* Likewise for an offset that is stored in the object that contains the
     vtable.  */
  if (indirect_offset != 0)
    {
      tree offset_ptr, offset_tree;

      /* Get the address of the offset.  */
      offset_ptr
	= create_tmp_reg (build_pointer_type
			    (build_pointer_type (vtable_entry_type)),
			  "offset_ptr");
      stmt = gimple_build_assign (offset_ptr,
				  build1 (NOP_EXPR, TREE_TYPE (offset_ptr),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      stmt = gimple_build_assign
	      (offset_ptr,
	       fold_build_pointer_plus_hwi_loc (input_location, offset_ptr,
						indirect_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      offset_tree = create_tmp_reg (TREE_TYPE (TREE_TYPE (offset_ptr)),
				    "offset");
      stmt = gimple_build_assign (offset_tree,
				  build_simple_mem_ref (offset_ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, offset_tree);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (VAR_P (ptr))
	ptrtmp = ptr;
      else
	{
	  ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
	  stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1871
1872 /* Expand thunk NODE to gimple if possible.
1873 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1874 no assembler is produced.
1875 When OUTPUT_ASM_THUNK is true, also produce assembler for
1876 thunks that are not lowered. */
1877
1878 bool
1879 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1880 {
1881 bool this_adjusting = thunk.this_adjusting;
1882 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1883 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1884 HOST_WIDE_INT indirect_offset = thunk.indirect_offset;
1885 tree virtual_offset = NULL;
1886 tree alias = callees->callee->decl;
1887 tree thunk_fndecl = decl;
1888 tree a;
1889
1890 if (!force_gimple_thunk
1891 && this_adjusting
1892 && indirect_offset == 0
1893 && !DECL_EXTERNAL (alias)
1894 && !DECL_STATIC_CHAIN (alias)
1895 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1896 virtual_value, alias))
1897 {
1898 tree fn_block;
1899 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1900
1901 if (!output_asm_thunks)
1902 {
1903 analyzed = true;
1904 return false;
1905 }
1906
1907 if (in_lto_p)
1908 get_untransformed_body ();
1909 a = DECL_ARGUMENTS (thunk_fndecl);
1910
1911 current_function_decl = thunk_fndecl;
1912
1913 /* Ensure thunks are emitted in their correct sections. */
1914 resolve_unique_section (thunk_fndecl, 0,
1915 flag_function_sections);
1916
1917 DECL_RESULT (thunk_fndecl)
1918 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1919 RESULT_DECL, 0, restype);
1920 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1921
1922 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1923 create one. */
1924 fn_block = make_node (BLOCK);
1925 BLOCK_VARS (fn_block) = a;
1926 DECL_INITIAL (thunk_fndecl) = fn_block;
1927 BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1928 allocate_struct_function (thunk_fndecl, false);
1929 init_function_start (thunk_fndecl);
1930 cfun->is_thunk = 1;
1931 insn_locations_init ();
1932 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1933 prologue_location = curr_insn_location ();
1934
1935 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1936 fixed_offset, virtual_value, alias);
1937
1938 insn_locations_finalize ();
1939 init_insn_lengths ();
1940 free_after_compilation (cfun);
1941 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1942 thunk.thunk_p = false;
1943 analyzed = false;
1944 }
1945 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1946 {
1947 error ("generic thunk code fails for method %qD which uses %<...%>",
1948 thunk_fndecl);
1949 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1950 analyzed = true;
1951 return false;
1952 }
1953 else
1954 {
1955 tree restype;
1956 basic_block bb, then_bb, else_bb, return_bb;
1957 gimple_stmt_iterator bsi;
1958 int nargs = 0;
1959 tree arg;
1960 int i;
1961 tree resdecl;
1962 tree restmp = NULL;
1963
1964 gcall *call;
1965 greturn *ret;
1966 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1967
1968 /* We may be called from expand_thunk that releases body except for
1969 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1970 if (in_lto_p && !force_gimple_thunk)
1971 get_untransformed_body ();
1972
1973 /* We need to force DECL_IGNORED_P when the thunk is created
1974 after early debug was run. */
1975 if (force_gimple_thunk)
1976 DECL_IGNORED_P (thunk_fndecl) = 1;
1977
1978 a = DECL_ARGUMENTS (thunk_fndecl);
1979
1980 current_function_decl = thunk_fndecl;
1981
1982 /* Ensure thunks are emitted in their correct sections. */
1983 resolve_unique_section (thunk_fndecl, 0,
1984 flag_function_sections);
1985
1986 bitmap_obstack_initialize (NULL);
1987
1988 if (thunk.virtual_offset_p)
1989 virtual_offset = size_int (virtual_value);
1990
1991 /* Build the return declaration for the function. */
1992 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1993 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1994 {
1995 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1996 DECL_ARTIFICIAL (resdecl) = 1;
1997 DECL_IGNORED_P (resdecl) = 1;
1998 DECL_CONTEXT (resdecl) = thunk_fndecl;
1999 DECL_RESULT (thunk_fndecl) = resdecl;
2000 }
2001 else
2002 resdecl = DECL_RESULT (thunk_fndecl);
2003
2004 profile_count cfg_count = count;
2005 if (!cfg_count.initialized_p ())
2006 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
2007
2008 bb = then_bb = else_bb = return_bb
2009 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
2010
2011 bsi = gsi_start_bb (bb);
2012
2013 /* Build call to the function being thunked. */
2014 if (!VOID_TYPE_P (restype)
2015 && (!alias_is_noreturn
2016 || TREE_ADDRESSABLE (restype)
2017 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
2018 {
2019 if (DECL_BY_REFERENCE (resdecl))
2020 {
2021 restmp = gimple_fold_indirect_ref (resdecl);
2022 if (!restmp)
2023 restmp = build2 (MEM_REF,
2024 TREE_TYPE (TREE_TYPE (resdecl)),
2025 resdecl,
2026 build_int_cst (TREE_TYPE (resdecl), 0));
2027 }
2028 else if (!is_gimple_reg_type (restype))
2029 {
2030 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
2031 {
2032 restmp = resdecl;
2033
2034 if (VAR_P (restmp))
2035 {
2036 add_local_decl (cfun, restmp);
2037 BLOCK_VARS (DECL_INITIAL (current_function_decl))
2038 = restmp;
2039 }
2040 }
2041 else
2042 restmp = create_tmp_var (restype, "retval");
2043 }
2044 else
2045 restmp = create_tmp_reg (restype, "retval");
2046 }
2047
2048 for (arg = a; arg; arg = DECL_CHAIN (arg))
2049 nargs++;
2050 auto_vec<tree> vargs (nargs);
2051 i = 0;
2052 arg = a;
2053 if (this_adjusting)
2054 {
2055 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
2056 virtual_offset, indirect_offset));
2057 arg = DECL_CHAIN (a);
2058 i = 1;
2059 }
2060
2061 if (nargs)
2062 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
2063 {
2064 tree tmp = arg;
2065 DECL_NOT_GIMPLE_REG_P (arg) = 0;
2066 if (!is_gimple_val (arg))
2067 {
2068 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
2069 (TREE_TYPE (arg)), "arg");
2070 gimple *stmt = gimple_build_assign (tmp, arg);
2071 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2072 }
2073 vargs.quick_push (tmp);
2074 }
2075 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
2076 callees->call_stmt = call;
2077 gimple_call_set_from_thunk (call, true);
2078 if (DECL_STATIC_CHAIN (alias))
2079 {
2080 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
2081 tree type = TREE_TYPE (p);
2082 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
2083 PARM_DECL, create_tmp_var_name ("CHAIN"),
2084 type);
2085 DECL_ARTIFICIAL (decl) = 1;
2086 DECL_IGNORED_P (decl) = 1;
2087 TREE_USED (decl) = 1;
2088 DECL_CONTEXT (decl) = thunk_fndecl;
2089 DECL_ARG_TYPE (decl) = type;
2090 TREE_READONLY (decl) = 1;
2091
2092 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
2093 sf->static_chain_decl = decl;
2094
2095 gimple_call_set_chain (call, decl);
2096 }
2097
2098 /* Return slot optimization is always possible and in fact required to
2099 return values with DECL_BY_REFERENCE. */
2100 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
2101 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
2102 || DECL_BY_REFERENCE (resdecl)))
2103 gimple_call_set_return_slot_opt (call, true);
2104
2105 if (restmp)
2106 {
2107 gimple_call_set_lhs (call, restmp);
2108 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
2109 TREE_TYPE (TREE_TYPE (alias))));
2110 }
2111 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
2112 if (!alias_is_noreturn)
2113 {
2114 if (restmp && !this_adjusting
2115 && (fixed_offset || virtual_offset))
2116 {
2117 tree true_label = NULL_TREE;
2118
2119 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
2120 {
2121 gimple *stmt;
2122 edge e;
2123 /* If the return type is a pointer, we need to
2124 protect against NULL. We know there will be an
2125 adjustment, because that's why we're emitting a
2126 thunk. */
2127 then_bb = create_basic_block (NULL, bb);
2128 then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
2129 return_bb = create_basic_block (NULL, then_bb);
2130 return_bb->count = cfg_count;
2131 else_bb = create_basic_block (NULL, else_bb);
2132 else_bb->count = cfg_count.apply_scale (1, 16);
2133 add_bb_to_loop (then_bb, bb->loop_father);
2134 add_bb_to_loop (return_bb, bb->loop_father);
2135 add_bb_to_loop (else_bb, bb->loop_father);
2136 remove_edge (single_succ_edge (bb));
2137 true_label = gimple_block_label (then_bb);
2138 stmt = gimple_build_cond (NE_EXPR, restmp,
2139 build_zero_cst (TREE_TYPE (restmp)),
2140 NULL_TREE, NULL_TREE);
2141 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2142 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
2143 e->probability = profile_probability::guessed_always ()
2144 .apply_scale (1, 16);
2145 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
2146 e->probability = profile_probability::guessed_always ()
2147 .apply_scale (1, 16);
2148 make_single_succ_edge (return_bb,
2149 EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
2150 make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
2151 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
2152 e->probability = profile_probability::always ();
2153 bsi = gsi_last_bb (then_bb);
2154 }
2155
2156 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
2157 fixed_offset, virtual_offset,
2158 indirect_offset);
2159 if (true_label)
2160 {
2161 gimple *stmt;
2162 bsi = gsi_last_bb (else_bb);
2163 stmt = gimple_build_assign (restmp,
2164 build_zero_cst (TREE_TYPE (restmp)));
2165 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
2166 bsi = gsi_last_bb (return_bb);
2167 }
2168 }
2169 else
2170 gimple_call_set_tail (call, true);
2171
2172 /* Build return value. */
2173 if (!DECL_BY_REFERENCE (resdecl))
2174 ret = gimple_build_return (restmp);
2175 else
2176 ret = gimple_build_return (resdecl);
2177
2178 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
2179 }
2180 else
2181 {
2182 gimple_call_set_tail (call, true);
2183 remove_edge (single_succ_edge (bb));
2184 }
2185
2186 cfun->gimple_df->in_ssa_p = true;
2187 update_max_bb_count ();
2188 profile_status_for_fn (cfun)
2189 = cfg_count.initialized_p () && cfg_count.ipa_p ()
2190 ? PROFILE_READ : PROFILE_GUESSED;
2191 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
2192 TREE_ASM_WRITTEN (thunk_fndecl) = false;
2193 delete_unreachable_blocks ();
2194 update_ssa (TODO_update_ssa);
2195 checking_verify_flow_info ();
2196 free_dominance_info (CDI_DOMINATORS);
2197
2198 /* Since we want to emit the thunk, we explicitly mark its name as
2199 referenced. */
2200 thunk.thunk_p = false;
2201 lowered = true;
2202 bitmap_obstack_release (NULL);
2203 }
2204 current_function_decl = NULL;
2205 set_cfun (NULL);
2206 return true;
2207 }
2208
2209 /* Assemble thunks and aliases associated to node. */
2210
2211 void
2212 cgraph_node::assemble_thunks_and_aliases (void)
2213 {
2214 cgraph_edge *e;
2215 ipa_ref *ref;
2216
2217 for (e = callers; e;)
2218 if (e->caller->thunk.thunk_p
2219 && !e->caller->inlined_to)
2220 {
2221 cgraph_node *thunk = e->caller;
2222
2223 e = e->next_caller;
2224 thunk->expand_thunk (true, false);
2225 thunk->assemble_thunks_and_aliases ();
2226 }
2227 else
2228 e = e->next_caller;
2229
2230 FOR_EACH_ALIAS (this, ref)
2231 {
2232 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2233 if (!alias->transparent_alias)
2234 {
2235 bool saved_written = TREE_ASM_WRITTEN (decl);
2236
2237 /* Force assemble_alias to really output the alias this time instead
2238 of buffering it in same alias pairs. */
2239 TREE_ASM_WRITTEN (decl) = 1;
2240 if (alias->symver)
2241 do_assemble_symver (alias->decl,
2242 DECL_ASSEMBLER_NAME (decl));
2243 else
2244 do_assemble_alias (alias->decl,
2245 DECL_ASSEMBLER_NAME (decl));
2246 alias->assemble_thunks_and_aliases ();
2247 TREE_ASM_WRITTEN (decl) = saved_written;
2248 }
2249 }
2250 }
2251
/* Expand function specified by node: lower its body to RTL, run the
   post-IPA compilation pipeline, emit attached thunks and aliases, and
   finally release the GIMPLE body and call edges.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!inlined_to);

  /* __RTL functions are compiled as soon as they are parsed, so don't
     do it again.  */
  if (native_rtl_p ())
    return;

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  gcc_assert (DECL_STRUCT_FUNCTION (decl));
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Refresh virtual operands before applying the IPA transform phases
     queued for this function.  */
  update_ssa (TODO_update_ssa_only_virtuals);
  execute_all_ipa_transforms (false);

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (!DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE_UNIT (ret_type),
			       warn_larger_than_size) > 0)
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size when it round-trips through unsigned
	     int; otherwise report only the configured threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_,
		     "size of return value of %q+D is %u bytes",
		     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_,
		     "size of return value of %q+D is larger than %wu bytes",
		     decl, warn_larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  if (cfun)
    pop_cfun ();

  /* It would make a lot more sense to output thunks before function body to
     get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
2374
/* Node comparator that is responsible for the order that corresponds
   to time when a function was launched for the first time.
   qsort-compatible: returns negative/zero/positive.  */

int
tp_first_run_node_cmp (const void *pa, const void *pb)
{
  const cgraph_node *a = *(const cgraph_node * const *) pa;
  const cgraph_node *b = *(const cgraph_node * const *) pb;
  unsigned int tp_first_run_a = a->tp_first_run;
  unsigned int tp_first_run_b = b->tp_first_run;

  /* Treat a node as unprofiled (tp_first_run == 0) when function
     reordering is disabled for it or it is marked no_reorder.  */
  if (!opt_for_fn (a->decl, flag_profile_reorder_functions)
      || a->no_reorder)
    tp_first_run_a = 0;
  if (!opt_for_fn (b->decl, flag_profile_reorder_functions)
      || b->no_reorder)
    tp_first_run_b = 0;

  /* Ties (including two unprofiled nodes) fall back to the original
     symbol-table order, keeping the sort deterministic.  */
  if (tp_first_run_a == tp_first_run_b)
    return a->order - b->order;

  /* Functions with time profile must be before these without profile.
     (x - 1) & INT_MAX wraps 0 around to INT_MAX, so unprofiled nodes
     sort after every profiled one while profiled nodes keep their
     relative order.  */
  tp_first_run_a = (tp_first_run_a - 1) & INT_MAX;
  tp_first_run_b = (tp_first_run_b - 1) & INT_MAX;

  return tp_first_run_a - tp_first_run_b;
}
2402
2403 /* Expand all functions that must be output.
2404
2405 Attempt to topologically sort the nodes so function is output when
2406 all called functions are already assembled to allow data to be
2407 propagated across the callgraph. Use a stack to get smaller distance
2408 between a function and its callees (later we may choose to use a more
2409 sophisticated algorithm for function reordering; we will likely want
2410 to use subsections to make the output functions appear in top-down
2411 order). */
2412
2413 static void
2414 expand_all_functions (void)
2415 {
2416 cgraph_node *node;
2417 cgraph_node **order = XCNEWVEC (cgraph_node *,
2418 symtab->cgraph_count);
2419 cgraph_node **tp_first_run_order = XCNEWVEC (cgraph_node *,
2420 symtab->cgraph_count);
2421 unsigned int expanded_func_count = 0, profiled_func_count = 0;
2422 int order_pos, tp_first_run_order_pos = 0, new_order_pos = 0;
2423 int i;
2424
2425 order_pos = ipa_reverse_postorder (order);
2426 gcc_assert (order_pos == symtab->cgraph_count);
2427
2428 /* Garbage collector may remove inline clones we eliminate during
2429 optimization. So we must be sure to not reference them. */
2430 for (i = 0; i < order_pos; i++)
2431 if (order[i]->process)
2432 {
2433 if (order[i]->tp_first_run
2434 && opt_for_fn (order[i]->decl, flag_profile_reorder_functions))
2435 tp_first_run_order[tp_first_run_order_pos++] = order[i];
2436 else
2437 order[new_order_pos++] = order[i];
2438 }
2439
2440 /* First output functions with time profile in specified order. */
2441 qsort (tp_first_run_order, tp_first_run_order_pos,
2442 sizeof (cgraph_node *), tp_first_run_node_cmp);
2443 for (i = 0; i < tp_first_run_order_pos; i++)
2444 {
2445 node = tp_first_run_order[i];
2446
2447 if (node->process)
2448 {
2449 expanded_func_count++;
2450 profiled_func_count++;
2451
2452 if (symtab->dump_file)
2453 fprintf (symtab->dump_file,
2454 "Time profile order in expand_all_functions:%s:%d\n",
2455 node->dump_asm_name (), node->tp_first_run);
2456 node->process = 0;
2457 node->expand ();
2458 }
2459 }
2460
2461 /* Output functions in RPO so callees get optimized before callers. This
2462 makes ipa-ra and other propagators to work.
2463 FIXME: This is far from optimal code layout. */
2464 for (i = new_order_pos - 1; i >= 0; i--)
2465 {
2466 node = order[i];
2467
2468 if (node->process)
2469 {
2470 expanded_func_count++;
2471 node->process = 0;
2472 node->expand ();
2473 }
2474 }
2475
2476 if (dump_file)
2477 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2478 main_input_filename, profiled_func_count, expanded_func_count);
2479
2480 if (symtab->dump_file && tp_first_run_order_pos)
2481 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2482 profiled_func_count, expanded_func_count);
2483
2484 symtab->process_new_functions ();
2485 free_gimplify_stack ();
2486 delete ipa_saved_clone_sources;
2487 ipa_saved_clone_sources = NULL;
2488 free (order);
2489 }
2490
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any symbol.  */
  ORDER_FUNCTION,	/* Function definition to be expanded.  */
  ORDER_VAR,		/* Defined variable to be assembled.  */
  ORDER_VAR_UNDEF,	/* Undefined variable declaration to be emitted.  */
  ORDER_ASM		/* Toplevel asm statement.  */
};

/* One slot of the queue built by output_in_order; KIND selects which
   member of the union U is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;	/* Valid for ORDER_VAR and ORDER_VAR_UNDEF.  */
    asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
2512
2513 /* Output all functions, variables, and asm statements in the order
2514 according to their order fields, which is the order in which they
2515 appeared in the file. This implements -fno-toplevel-reorder. In
2516 this mode we may output functions and variables which don't really
2517 need to be output. */
2518
2519 static void
2520 output_in_order (void)
2521 {
2522 int max;
2523 cgraph_order_sort *nodes;
2524 int i;
2525 cgraph_node *pf;
2526 varpool_node *pv;
2527 asm_node *pa;
2528 max = symtab->order;
2529 nodes = XCNEWVEC (cgraph_order_sort, max);
2530
2531 FOR_EACH_DEFINED_FUNCTION (pf)
2532 {
2533 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2534 {
2535 if (!pf->no_reorder)
2536 continue;
2537 i = pf->order;
2538 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2539 nodes[i].kind = ORDER_FUNCTION;
2540 nodes[i].u.f = pf;
2541 }
2542 }
2543
2544 /* There is a similar loop in symbol_table::output_variables.
2545 Please keep them in sync. */
2546 FOR_EACH_VARIABLE (pv)
2547 {
2548 if (!pv->no_reorder)
2549 continue;
2550 if (DECL_HARD_REGISTER (pv->decl)
2551 || DECL_HAS_VALUE_EXPR_P (pv->decl))
2552 continue;
2553 i = pv->order;
2554 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2555 nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2556 nodes[i].u.v = pv;
2557 }
2558
2559 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2560 {
2561 i = pa->order;
2562 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2563 nodes[i].kind = ORDER_ASM;
2564 nodes[i].u.a = pa;
2565 }
2566
2567 /* In toplevel reorder mode we output all statics; mark them as needed. */
2568
2569 for (i = 0; i < max; ++i)
2570 if (nodes[i].kind == ORDER_VAR)
2571 nodes[i].u.v->finalize_named_section_flags ();
2572
2573 for (i = 0; i < max; ++i)
2574 {
2575 switch (nodes[i].kind)
2576 {
2577 case ORDER_FUNCTION:
2578 nodes[i].u.f->process = 0;
2579 nodes[i].u.f->expand ();
2580 break;
2581
2582 case ORDER_VAR:
2583 nodes[i].u.v->assemble_decl ();
2584 break;
2585
2586 case ORDER_VAR_UNDEF:
2587 assemble_undefined_decl (nodes[i].u.v->decl);
2588 break;
2589
2590 case ORDER_ASM:
2591 assemble_asm (nodes[i].u.a->asm_str);
2592 break;
2593
2594 case ORDER_UNDEFINED:
2595 break;
2596
2597 default:
2598 gcc_unreachable ();
2599 }
2600 }
2601
2602 symtab->clear_asm_symbols ();
2603
2604 free (nodes);
2605 }
2606
/* Run the inter-procedural pass pipeline: small IPA passes, summary
   generation, LTO/offload summary streaming when requested, and finally
   the regular IPA passes.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips the matching
	 bitmap_obstack_release at the end of the function; presumably
	 harmless since compilation aborts on errors — confirm.  */
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p
      || flag_incremental_link == INCREMENTAL_LINK_LTO)
    {
      if (!quiet_flag)
	fprintf (stderr, "Streaming LTO\n");
      /* Offload summaries are streamed first, into sections with the
	 offload prefix, guarded by lto_stream_offload_p.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes unless this is a slim-LTO compile whose
     optimization happens at link time (or an LTRANS stage handled
     elsewhere).  */
  if (!flag_ltrans
      && ((in_lto_p && flag_incremental_link != INCREMENTAL_LINK_LTO)
	  || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2684
2685
2686 /* Return string alias is alias of. */
2687
2688 static tree
2689 get_alias_symbol (tree decl)
2690 {
2691 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2692 return get_identifier (TREE_STRING_POINTER
2693 (TREE_VALUE (TREE_VALUE (alias))));
2694 }
2695
2696
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* NOTE(review): the assignment below is dead code behind
	       gcc_unreachable — an unanalyzed weakref without an
	       alias_target is not expected to occur here.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2730
/* Perform simple optimizations based on callgraph.  This is the master
   driver: it runs the IPA pipeline, then expands and assembles every
   symbol that must be output, and finally verifies that all GIMPLE
   bodies were released.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

  symtab_node::checking_verify_symtab_nodes ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    dump_memory_report ("Memory consumption before IPA");
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    {
      timevar_start (TV_CGRAPH_IPA_PASSES);
      ipa_passes ();
      timevar_stop (TV_CGRAPH_IPA_PASSES);
    }
  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || ((!in_lto_p || flag_incremental_link == INCREMENTAL_LINK_LTO)
	  && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab->dump (dump_file);
    }
  if (post_ipa_mem_report)
    dump_memory_report ("Memory consumption after IPA");
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  switch_to_section (text_section);
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
  symtab_node::checking_verify_symtab_nodes ();

  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	  (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	  = (node->alias_target ? node->alias_target
	     : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  /* Output first asm statements and anything ordered.  The process
     flag is cleared for these nodes, so we skip them later.  */
  output_in_order ();

  timevar_start (TV_CGRAPH_FUNC_EXPANSION);
  expand_all_functions ();
  timevar_stop (TV_CGRAPH_FUNC_EXPANSION);

  output_variables ();

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab->dump (dump_file);
    }
  /* The remainder is consistency checking only.  */
  if (!flag_checking)
    return;
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
}
2858
/* Earlydebug dump file, flags, and number.  The number is registered in
   debuginfo_early_init; the stream is opened in debuginfo_init.  */

static int debuginfo_early_dump_nr;
static FILE *debuginfo_early_dump_file;
static dump_flags_t debuginfo_early_dump_flags;

/* Debug dump file, flags, and number.  Same lifecycle as above.  */

static int debuginfo_dump_nr;
static FILE *debuginfo_dump_file;
static dump_flags_t debuginfo_dump_flags;
2870
2871 /* Register the debug and earlydebug dump files. */
2872
2873 void
2874 debuginfo_early_init (void)
2875 {
2876 gcc::dump_manager *dumps = g->get_dumps ();
2877 debuginfo_early_dump_nr = dumps->dump_register (".earlydebug", "earlydebug",
2878 "earlydebug", DK_tree,
2879 OPTGROUP_NONE,
2880 false);
2881 debuginfo_dump_nr = dumps->dump_register (".debug", "debug",
2882 "debug", DK_tree,
2883 OPTGROUP_NONE,
2884 false);
2885 }
2886
2887 /* Initialize the debug and earlydebug dump files. */
2888
2889 void
2890 debuginfo_init (void)
2891 {
2892 gcc::dump_manager *dumps = g->get_dumps ();
2893 debuginfo_dump_file = dump_begin (debuginfo_dump_nr, NULL);
2894 debuginfo_dump_flags = dumps->get_dump_file_info (debuginfo_dump_nr)->pflags;
2895 debuginfo_early_dump_file = dump_begin (debuginfo_early_dump_nr, NULL);
2896 debuginfo_early_dump_flags
2897 = dumps->get_dump_file_info (debuginfo_early_dump_nr)->pflags;
2898 }
2899
2900 /* Finalize the debug and earlydebug dump files. */
2901
2902 void
2903 debuginfo_fini (void)
2904 {
2905 if (debuginfo_dump_file)
2906 dump_end (debuginfo_dump_nr, debuginfo_dump_file);
2907 if (debuginfo_early_dump_file)
2908 dump_end (debuginfo_early_dump_nr, debuginfo_early_dump_file);
2909 }
2910
/* Set dump_file to the debug dump file (may be NULL if the dump was
   not enabled).  */

void
debuginfo_start (void)
{
  set_dump_file (debuginfo_dump_file);
}

/* Undo setting dump_file to the debug dump file.  */

void
debuginfo_stop (void)
{
  set_dump_file (NULL);
}

/* Set dump_file to the earlydebug dump file (may be NULL if the dump
   was not enabled).  */

void
debuginfo_early_start (void)
{
  set_dump_file (debuginfo_early_dump_file);
}

/* Undo setting dump_file to the earlydebug dump file.  */

void
debuginfo_early_stop (void)
{
  set_dump_file (NULL);
}
2942
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by front ends after all toplevel declarations
   have been finalized; drives analysis, early debug, and compile ().  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions (/*first_time=*/true);

  /* Mark alias targets necessary and emit diagnostics.  Run a second
     time for aliases discovered during the first analysis.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions (/*first_time=*/false);

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  if (!seen_error ())
    {
      /* Emit early debug for reachable functions, and by consequence,
	 locally scoped symbols.  */
      struct cgraph_node *cnode;
      FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
	(*debug_hooks->early_global_decl) (cnode->decl);

      /* Clean up anything that needs cleaning up after initial debug
	 generation.  */
      debuginfo_early_start ();
      (*debug_hooks->early_finish) (main_input_filename);
      debuginfo_early_stop ();
    }

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
3007
3008 /* Reset all state within cgraphunit.c so that we can rerun the compiler
3009 within the same process. For use by toplev::finalize. */
3010
3011 void
3012 cgraphunit_c_finalize (void)
3013 {
3014 gcc_assert (cgraph_new_nodes.length () == 0);
3015 cgraph_new_nodes.truncate (0);
3016
3017 vtable_entry_type = NULL;
3018 queued_nodes = &symtab_terminator;
3019
3020 first_analyzed = NULL;
3021 first_analyzed_var = NULL;
3022 }
3023
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  The node's body is discarded and replaced by
   a GIMPLE thunk that forwards all arguments to TARGET.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  /* Zero all thunk metadata (no this-adjustment, no virtual offset)
     before marking the node as a thunk.  */
  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  create_edge (target, NULL, count);
  callees->can_throw_external = !TREE_NOTHROW (target->decl);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Parameters are forwarded directly, so none of them needs to be
     addressable in the wrapper.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  /* Expand to GIMPLE (output_asm_thunks=false, force_gimple_thunk=true).  */
  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
3066
3067 #include "gt-cgraphunit.h"