]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
PR28901 Add two levels for -Wunused-const-variable.
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2016 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31        and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66        indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78        The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86        time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98        d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110        is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121        IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151        The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h" /* FIXME: For reg_obstack. */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "ipa-prop.h"
194 #include "gimple-pretty-print.h"
195 #include "plugin.h"
196 #include "ipa-inline.h"
197 #include "ipa-utils.h"
198 #include "except.h"
199 #include "cfgloop.h"
200 #include "context.h"
201 #include "pass_manager.h"
202 #include "tree-nested.h"
203 #include "dbgcnt.h"
204 #include "tree-chkp.h"
205 #include "lto-section-names.h"
206
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations of compilation-driver stages defined later in
   this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
218
219 /* Determine if symbol declaration is needed. That is, visible to something
220 either outside this translation unit, something magic in the system
221 configury */
222 bool
223 symtab_node::needed_p (void)
224 {
225 /* Double check that no one output the function into assembly file
226 early. */
227 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
228 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
229
230 if (!definition)
231 return false;
232
233 if (DECL_EXTERNAL (decl))
234 return false;
235
236 /* If the user told us it is used, then it must be so. */
237 if (force_output)
238 return true;
239
240 /* ABI forced symbols are needed when they are external. */
241 if (forced_by_abi && TREE_PUBLIC (decl))
242 return true;
243
244 /* Keep constructors, destructors and virtual functions. */
245 if (TREE_CODE (decl) == FUNCTION_DECL
246 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
247 return true;
248
249 /* Externally visible variables must be output. The exception is
250 COMDAT variables that must be output only when they are needed. */
251 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
252 return true;
253
254 return false;
255 }
256
257 /* Head and terminator of the queue of nodes to be processed while building
258 callgraph. */
259
260 static symtab_node symtab_terminator;
261 static symtab_node *queued_nodes = &symtab_terminator;
262
263 /* Add NODE to queue starting at QUEUED_NODES.
264 The queue is linked via AUX pointers and terminated by pointer to 1. */
265
266 static void
267 enqueue_node (symtab_node *node)
268 {
269 if (node->aux)
270 return;
271 gcc_checking_assert (queued_nodes);
272 node->aux = queued_nodes;
273 queued_nodes = node;
274 }
275
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   How much work is done per function depends on how far the compilation
   pipeline (STATE) has progressed when the function appears.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  /* Fast path: no pass has queued any new functions.  */
  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  Indexing with a re-evaluated
     length () (rather than an iterator) is therefore deliberate.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA form if the rest of the unit already
	     is; otherwise just (re)compute the inline parameters.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summaries != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* Everything queued so far has been handled; drop the vector.  */
  cgraph_new_nodes.release ();
}
344
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  transparent_alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  /* Drop the outgoing edges and references; they will be rebuilt when
     the new body is analyzed.  */
  remove_callees ();
  remove_all_references ();
}
379
380 /* Return true when there are references to the node. INCLUDE_SELF is
381 true if a self reference counts as a reference. */
382
383 bool
384 symtab_node::referred_to_p (bool include_self)
385 {
386 ipa_ref *ref = NULL;
387
388 /* See if there are any references at all. */
389 if (iterate_referring (0, ref))
390 return true;
391 /* For functions check also calls. */
392 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
393 if (cn && cn->callers)
394 {
395 if (include_self)
396 return true;
397 for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
398 if (e->caller != this)
399 return true;
400 }
401 return false;
402 }
403
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* Redefinition of a (GNU extern inline) function: drop the old
	 body and analysis results so the new body is analyzed afresh.  */
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  /* The front end may hand us a body with the CFG already built
     (low GIMPLE).  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions.  (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is still being constructed, queue the node
     for analysis if something already makes it needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
462
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;

  if (dump_file)
    {
      /* Report which GIMPLE form the body currently is in.  */
      struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
      const char *function_type = ((gimple_has_body_p (fndecl))
				   ? (lowered
				      ? (gimple_in_ssa_p (fn)
					 ? "ssa gimple"
					 : "low gimple")
				      : "high gimple")
				   : "to-be-gimplified");
      fprintf (dump_file,
	       "Added new %s function %s to callgraph\n",
	       function_type,
	       fndecl_name (fndecl));
    }

  switch (symtab->state)
    {
    case PARSING:
      /* Still parsing: the ordinary finalization path applies.  */
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      /* During expansion the lowering passes will no longer be run for
	 us, so lower the body here before queueing it.  */
      if (!lowered && symtab->state == EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
565
/* Analyze the function scheduled to be output.  Depending on the node kind
   this means expanding a thunk, resolving an alias, generating a dispatcher
   body, or gimplifying and lowering an ordinary function body.  */
void
cgraph_node::analyze (void)
{
  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      cgraph_node *t = cgraph_node::get (thunk.alias);

      create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
      callees->can_throw_external = !TREE_NOTHROW (t->decl);
      /* Target code in expand_thunk may need the thunk's target
	 to be analyzed, so recurse here.  */
      if (!t->analyzed)
	t->analyze ();
      if (t->alias)
	{
	  t = t->get_alias_target ();
	  if (!t->analyzed)
	    t->analyze ();
	}
      /* A false return from expand_thunk means no GIMPLE body was
	 produced; nothing further to analyze in that case.  */
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target), transparent_alias);
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* Ordinary function: gimplify and lower the body.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  /* Nested functions must be lowered (detached from their
	     parent) before the parent itself can be.  */
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
650
651 /* C++ frontend produce same body aliases all over the place, even before PCH
652 gets streamed out. It relies on us linking the aliases with their function
653 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
654 first produce aliases without links, but once C++ FE is sure he won't sream
655 PCH we build the links via this function. */
656
657 void
658 symbol_table::process_same_body_aliases (void)
659 {
660 symtab_node *node;
661 FOR_EACH_SYMBOL (node)
662 if (node->cpp_implicit_alias && !node->analyzed)
663 node->resolve_alias
664 (TREE_CODE (node->alias_target) == VAR_DECL
665 ? (symtab_node *)varpool_node::get_create (node->alias_target)
666 : (symtab_node *)cgraph_node::get_create (node->alias_target));
667 cpp_implicit_aliases_done = true;
668 }
669
670 /* Process attributes common for vars and functions. */
671
672 static void
673 process_common_attributes (symtab_node *node, tree decl)
674 {
675 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
676
677 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
678 {
679 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
680 "%<weakref%> attribute should be accompanied with"
681 " an %<alias%> attribute");
682 DECL_WEAK (decl) = 0;
683 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
684 DECL_ATTRIBUTES (decl));
685 }
686
687 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
688 node->no_reorder = 1;
689 }
690
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* Walk only the functions added since FIRST; earlier ones were
     handled by a previous invocation.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on a defined, non-alias function is meaningless:
	 warn and strip it.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  /* And the same for variables added since FIRST_VAR.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external variable with an initializer is in fact a
	 definition; finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on an initialized variable is likewise meaningless.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
785
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  /* Only variables with static storage or external linkage belong in
     the varpool.  */
  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
	   && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* While the symbol table is still being constructed, queue the
     variable if something already needs or references it.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Register the initializer with pointer-bounds instrumentation.  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
827
/* EDGE is an polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* add () returns true when CACHE_TOKEN was already in the set,
     i.e. this target list has been walked before.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	   (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* Zero possible targets means the call is provably
	     unreachable; redirect it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
914
/* Issue appropriate warnings for the global declaration DECL.  */

static void
check_global_declaration (symtab_node *snode)
{
  tree decl = snode->decl;

  /* Warn about any function declared static but not defined.  We don't
     warn about variables, because many programs have static variables
     that exist only to get some text into the object file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && DECL_INITIAL (decl) == 0
      && DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! TREE_NO_WARNING (decl)
      && ! TREE_PUBLIC (decl)
      && (warn_unused_function
	  || snode->referred_to_p (/*include_self=*/false)))
    {
      /* A referenced but undefined static gets a pedwarn; an
	 unreferenced one is merely unused.  */
      if (snode->referred_to_p (/*include_self=*/false))
	pedwarn (input_location, 0, "%q+F used but never defined", decl);
      else
	warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
      /* This symbol is effectively an "extern" declaration now.  */
      TREE_PUBLIC (decl) = 1;
    }

  /* Warn about static fns or vars defined but not used.  */
  if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
       /* For constant variables -Wunused-const-variable has two levels:
	  level 2 warns everywhere, level 1 only about variables from
	  the main file.  */
       || (((warn_unused_variable && ! TREE_READONLY (decl))
	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
		&& (warn_unused_const_variable == 2
		    || filename_cmp (main_input_filename,
				     DECL_SOURCE_FILE (decl)) == 0)))
	   && TREE_CODE (decl) == VAR_DECL))
      && ! DECL_IN_SYSTEM_HEADER (decl)
      && ! snode->referred_to_p (/*include_self=*/false)
      /* This TREE_USED check is needed in addition to referred_to_p
	 above, because the `__unused__' attribute is not being
	 considered for referred_to_p.  */
      && ! TREE_USED (decl)
      /* The TREE_USED bit for file-scope decls is kept in the identifier,
	 to handle multiple external decls in different scopes.  */
      && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
      && ! DECL_EXTERNAL (decl)
      && ! DECL_ARTIFICIAL (decl)
      && ! DECL_ABSTRACT_ORIGIN (decl)
      && ! TREE_PUBLIC (decl)
      /* A volatile variable might be used in some non-obvious way.  */
      && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
      /* Global register variables must be declared to reserve them.  */
      && ! (TREE_CODE (decl) == VAR_DECL && DECL_REGISTER (decl))
      /* Global ctors and dtors are called by the runtime.  */
      && (TREE_CODE (decl) != FUNCTION_DECL
	  || (!DECL_STATIC_CONSTRUCTOR (decl)
	      && !DECL_STATIC_DESTRUCTOR (decl)))
      /* Otherwise, ask the language.  */
      && lang_hooks.decls.warn_unused_global (decl))
    warning_at (DECL_SOURCE_LOCATION (decl),
		(TREE_CODE (decl) == FUNCTION_DECL)
		? OPT_Wunused_function
		: (TREE_READONLY (decl)
		   ? OPT_Wunused_const_variable_
		   : OPT_Wunused_variable),
		"%qD defined but not used", decl);
}
981
982 /* Discover all functions and variables that are trivially needed, analyze
983 them as well as all functions and variables referred by them */
984 static cgraph_node *first_analyzed;
985 static varpool_node *first_analyzed_var;
986
987 /* FIRST_TIME is set to TRUE for the first time we are called for a
988 translation unit from finalize_compilation_unit() or false
989 otherwise. */
990
991 static void
992 analyze_functions (bool first_time)
993 {
994 /* Keep track of already processed nodes when called multiple times for
995 intermodule optimization. */
996 cgraph_node *first_handled = first_analyzed;
997 varpool_node *first_handled_var = first_analyzed_var;
998 hash_set<void *> reachable_call_targets;
999
1000 symtab_node *node;
1001 symtab_node *next;
1002 int i;
1003 ipa_ref *ref;
1004 bool changed = true;
1005 location_t saved_loc = input_location;
1006
1007 bitmap_obstack_initialize (NULL);
1008 symtab->state = CONSTRUCTION;
1009 input_location = UNKNOWN_LOCATION;
1010
1011 /* Ugly, but the fixup can not happen at a time same body alias is created;
1012 C++ FE is confused about the COMDAT groups being right. */
1013 if (symtab->cpp_implicit_aliases_done)
1014 FOR_EACH_SYMBOL (node)
1015 if (node->cpp_implicit_alias)
1016 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1017 build_type_inheritance_graph ();
1018
1019 /* Analysis adds static variables that in turn adds references to new functions.
1020 So we need to iterate the process until it stabilize. */
1021 while (changed)
1022 {
1023 changed = false;
1024 process_function_and_variable_attributes (first_analyzed,
1025 first_analyzed_var);
1026
1027 /* First identify the trivially needed symbols. */
1028 for (node = symtab->first_symbol ();
1029 node != first_analyzed
1030 && node != first_analyzed_var; node = node->next)
1031 {
1032 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
1033 node->get_comdat_group_id ();
1034 if (node->needed_p ())
1035 {
1036 enqueue_node (node);
1037 if (!changed && symtab->dump_file)
1038 fprintf (symtab->dump_file, "Trivially needed symbols:");
1039 changed = true;
1040 if (symtab->dump_file)
1041 fprintf (symtab->dump_file, " %s", node->asm_name ());
1042 if (!changed && symtab->dump_file)
1043 fprintf (symtab->dump_file, "\n");
1044 }
1045 if (node == first_analyzed
1046 || node == first_analyzed_var)
1047 break;
1048 }
1049 symtab->process_new_functions ();
1050 first_analyzed_var = symtab->first_variable ();
1051 first_analyzed = symtab->first_function ();
1052
1053 if (changed && symtab->dump_file)
1054 fprintf (symtab->dump_file, "\n");
1055
1056 /* Lower representation, build callgraph edges and references for all trivially
1057 needed symbols and all symbols referred by them. */
1058 while (queued_nodes != &symtab_terminator)
1059 {
1060 changed = true;
1061 node = queued_nodes;
1062 queued_nodes = (symtab_node *)queued_nodes->aux;
1063 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1064 if (cnode && cnode->definition)
1065 {
1066 cgraph_edge *edge;
1067 tree decl = cnode->decl;
1068
1069 /* ??? It is possible to create extern inline function
1070 and later using weak alias attribute to kill its body.
1071 See gcc.c-torture/compile/20011119-1.c */
1072 if (!DECL_STRUCT_FUNCTION (decl)
1073 && !cnode->alias
1074 && !cnode->thunk.thunk_p
1075 && !cnode->dispatcher_function)
1076 {
1077 cnode->reset ();
1078 cnode->local.redefined_extern_inline = true;
1079 continue;
1080 }
1081
1082 if (!cnode->analyzed)
1083 cnode->analyze ();
1084
1085 for (edge = cnode->callees; edge; edge = edge->next_callee)
1086 if (edge->callee->definition
1087 && (!DECL_EXTERNAL (edge->callee->decl)
1088 /* When not optimizing, do not try to analyze extern
1089 inline functions. Doing so is pointless. */
1090 || opt_for_fn (edge->callee->decl, optimize)
1091 /* Weakrefs needs to be preserved. */
1092 || edge->callee->alias
1093 /* always_inline functions are inlined aven at -O0. */
1094 || lookup_attribute
1095 ("always_inline",
1096 DECL_ATTRIBUTES (edge->callee->decl))
1097 /* Multiversioned functions needs the dispatcher to
1098 be produced locally even for extern functions. */
1099 || edge->callee->function_version ()))
1100 enqueue_node (edge->callee);
1101 if (opt_for_fn (cnode->decl, optimize)
1102 && opt_for_fn (cnode->decl, flag_devirtualize))
1103 {
1104 cgraph_edge *next;
1105
1106 for (edge = cnode->indirect_calls; edge; edge = next)
1107 {
1108 next = edge->next_callee;
1109 if (edge->indirect_info->polymorphic)
1110 walk_polymorphic_call_targets (&reachable_call_targets,
1111 edge);
1112 }
1113 }
1114
1115 /* If decl is a clone of an abstract function,
1116 mark that abstract function so that we don't release its body.
1117 The DECL_INITIAL() of that abstract function declaration
1118 will be later needed to output debug info. */
1119 if (DECL_ABSTRACT_ORIGIN (decl))
1120 {
1121 cgraph_node *origin_node
1122 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1123 origin_node->used_as_abstract_origin = true;
1124 }
1125 }
1126 else
1127 {
1128 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1129 if (vnode && vnode->definition && !vnode->analyzed)
1130 vnode->analyze ();
1131 }
1132
1133 if (node->same_comdat_group)
1134 {
1135 symtab_node *next;
1136 for (next = node->same_comdat_group;
1137 next != node;
1138 next = next->same_comdat_group)
1139 if (!next->comdat_local_p ())
1140 enqueue_node (next);
1141 }
1142 for (i = 0; node->iterate_reference (i, ref); i++)
1143 if (ref->referred->definition
1144 && (!DECL_EXTERNAL (ref->referred->decl)
1145 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1146 && optimize)
1147 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1148 && opt_for_fn (ref->referred->decl, optimize))
1149 || node->alias
1150 || ref->referred->alias)))
1151 enqueue_node (ref->referred);
1152 symtab->process_new_functions ();
1153 }
1154 }
1155 update_type_inheritance_graph ();
1156
1157 /* Collect entry points to the unit. */
1158 if (symtab->dump_file)
1159 {
1160 fprintf (symtab->dump_file, "\n\nInitial ");
1161 symtab_node::dump_table (symtab->dump_file);
1162 }
1163
1164 if (first_time)
1165 {
1166 symtab_node *snode;
1167 FOR_EACH_SYMBOL (snode)
1168 check_global_declaration (snode);
1169 }
1170
1171 if (symtab->dump_file)
1172 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1173
1174 for (node = symtab->first_symbol ();
1175 node != first_handled
1176 && node != first_handled_var; node = next)
1177 {
1178 next = node->next;
1179 if (!node->aux && !node->referred_to_p ())
1180 {
1181 if (symtab->dump_file)
1182 fprintf (symtab->dump_file, " %s", node->name ());
1183
1184 /* See if the debugger can use anything before the DECL
1185 passes away. Perhaps it can notice a DECL that is now a
1186 constant and can tag the early DIE with an appropriate
1187 attribute.
1188
1189 Otherwise, this is the last chance the debug_hooks have
1190 at looking at optimized away DECLs, since
1191 late_global_decl will subsequently be called from the
1192 contents of the now pruned symbol table. */
1193 if (!decl_function_context (node->decl))
1194 (*debug_hooks->late_global_decl) (node->decl);
1195
1196 node->remove ();
1197 continue;
1198 }
1199 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1200 {
1201 tree decl = node->decl;
1202
1203 if (cnode->definition && !gimple_has_body_p (decl)
1204 && !cnode->alias
1205 && !cnode->thunk.thunk_p)
1206 cnode->reset ();
1207
1208 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1209 || cnode->alias
1210 || gimple_has_body_p (decl));
1211 gcc_assert (cnode->analyzed == cnode->definition);
1212 }
1213 node->aux = NULL;
1214 }
1215 for (;node; node = node->next)
1216 node->aux = NULL;
1217 first_analyzed = symtab->first_function ();
1218 first_analyzed_var = symtab->first_variable ();
1219 if (symtab->dump_file)
1220 {
1221 fprintf (symtab->dump_file, "\n\nReclaimed ");
1222 symtab_node::dump_table (symtab->dump_file);
1223 }
1224 bitmap_obstack_release (NULL);
1225 ggc_collect ();
1226 /* Initialize assembler name hash, in particular we want to trigger C++
1227 mangling and same body alias creation before we free DECL_ARGUMENTS
1228 used by it. */
1229 if (!seen_error ())
1230 symtab->symtab_initialize_asm_name_hash ();
1231
1232 input_location = saved_loc;
1233 }
1234
1235 /* Translate the ugly representation of aliases as alias pairs into nice
1236 representation in callgraph. We don't handle all cases yet,
1237 unfortunately. */
1238
1239 static void
1240 handle_alias_pairs (void)
1241 {
1242 alias_pair *p;
1243 unsigned i;
1244
1245 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1246 {
1247 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1248
1249 /* Weakrefs with target not defined in current unit are easy to handle:
1250 they behave just as external variables except we need to note the
1251 alias flag to later output the weakref pseudo op into asm file. */
1252 if (!target_node
1253 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1254 {
1255 symtab_node *node = symtab_node::get (p->decl);
1256 if (node)
1257 {
1258 node->alias_target = p->target;
1259 node->weakref = true;
1260 node->alias = true;
1261 node->transparent_alias = true;
1262 }
1263 alias_pairs->unordered_remove (i);
1264 continue;
1265 }
1266 else if (!target_node)
1267 {
1268 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1269 symtab_node *node = symtab_node::get (p->decl);
1270 if (node)
1271 node->alias = false;
1272 alias_pairs->unordered_remove (i);
1273 continue;
1274 }
1275
1276 if (DECL_EXTERNAL (target_node->decl)
1277 /* We use local aliases for C++ thunks to force the tailcall
1278 to bind locally. This is a hack - to keep it working do
1279 the following (which is not strictly correct). */
1280 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1281 || ! DECL_VIRTUAL_P (target_node->decl))
1282 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1283 {
1284 error ("%q+D aliased to external symbol %qE",
1285 p->decl, p->target);
1286 }
1287
1288 if (TREE_CODE (p->decl) == FUNCTION_DECL
1289 && target_node && is_a <cgraph_node *> (target_node))
1290 {
1291 cgraph_node *src_node = cgraph_node::get (p->decl);
1292 if (src_node && src_node->definition)
1293 src_node->reset ();
1294 cgraph_node::create_alias (p->decl, target_node->decl);
1295 alias_pairs->unordered_remove (i);
1296 }
1297 else if (TREE_CODE (p->decl) == VAR_DECL
1298 && target_node && is_a <varpool_node *> (target_node))
1299 {
1300 varpool_node::create_alias (p->decl, target_node->decl);
1301 alias_pairs->unordered_remove (i);
1302 }
1303 else
1304 {
1305 error ("%q+D alias in between function and variable is not supported",
1306 p->decl);
1307 warning (0, "%q+D aliased declaration",
1308 target_node->decl);
1309 alias_pairs->unordered_remove (i);
1310 }
1311 }
1312 vec_free (alias_pairs);
1313 }
1314
1315
1316 /* Figure out what functions we want to assemble. */
1317
1318 static void
1319 mark_functions_to_output (void)
1320 {
1321 bool check_same_comdat_groups = false;
1322 cgraph_node *node;
1323
1324 if (flag_checking)
1325 FOR_EACH_FUNCTION (node)
1326 gcc_assert (!node->process);
1327
1328 FOR_EACH_FUNCTION (node)
1329 {
1330 tree decl = node->decl;
1331
1332 gcc_assert (!node->process || node->same_comdat_group);
1333 if (node->process)
1334 continue;
1335
1336 /* We need to output all local functions that are used and not
1337 always inlined, as well as those that are reachable from
1338 outside the current compilation unit. */
1339 if (node->analyzed
1340 && !node->thunk.thunk_p
1341 && !node->alias
1342 && !node->global.inlined_to
1343 && !TREE_ASM_WRITTEN (decl)
1344 && !DECL_EXTERNAL (decl))
1345 {
1346 node->process = 1;
1347 if (node->same_comdat_group)
1348 {
1349 cgraph_node *next;
1350 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1351 next != node;
1352 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1353 if (!next->thunk.thunk_p && !next->alias
1354 && !next->comdat_local_p ())
1355 next->process = 1;
1356 }
1357 }
1358 else if (node->same_comdat_group)
1359 {
1360 if (flag_checking)
1361 check_same_comdat_groups = true;
1362 }
1363 else
1364 {
1365 /* We should've reclaimed all functions that are not needed. */
1366 if (flag_checking
1367 && !node->global.inlined_to
1368 && gimple_has_body_p (decl)
1369 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1370 are inside partition, we can end up not removing the body since we no longer
1371 have analyzed node pointing to it. */
1372 && !node->in_other_partition
1373 && !node->alias
1374 && !node->clones
1375 && !DECL_EXTERNAL (decl))
1376 {
1377 node->debug ();
1378 internal_error ("failed to reclaim unneeded function");
1379 }
1380 gcc_assert (node->global.inlined_to
1381 || !gimple_has_body_p (decl)
1382 || node->in_other_partition
1383 || node->clones
1384 || DECL_ARTIFICIAL (decl)
1385 || DECL_EXTERNAL (decl));
1386
1387 }
1388
1389 }
1390 if (flag_checking && check_same_comdat_groups)
1391 FOR_EACH_FUNCTION (node)
1392 if (node->same_comdat_group && !node->process)
1393 {
1394 tree decl = node->decl;
1395 if (!node->global.inlined_to
1396 && gimple_has_body_p (decl)
1397 /* FIXME: in an ltrans unit when the offline copy is outside a
1398 partition but inline copies are inside a partition, we can
1399 end up not removing the body since we no longer have an
1400 analyzed node pointing to it. */
1401 && !node->in_other_partition
1402 && !node->clones
1403 && !DECL_EXTERNAL (decl))
1404 {
1405 node->debug ();
1406 internal_error ("failed to reclaim unneeded function in same "
1407 "comdat group");
1408 }
1409 }
1410 }
1411
1412 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1413 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1414
1415 Set current_function_decl and cfun to newly constructed empty function body.
1416 return basic block in the function body. */
1417
1418 basic_block
1419 init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
1420 {
1421 basic_block bb;
1422 edge e;
1423
1424 current_function_decl = decl;
1425 allocate_struct_function (decl, false);
1426 gimple_register_cfg_hooks ();
1427 init_empty_tree_cfg ();
1428
1429 if (in_ssa)
1430 {
1431 init_tree_ssa (cfun);
1432 init_ssa_operands (cfun);
1433 cfun->gimple_df->in_ssa_p = true;
1434 cfun->curr_properties |= PROP_ssa;
1435 }
1436
1437 DECL_INITIAL (decl) = make_node (BLOCK);
1438
1439 DECL_SAVED_TREE (decl) = error_mark_node;
1440 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1441 | PROP_cfg | PROP_loops);
1442
1443 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1444 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1445 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1446
1447 /* Create BB for body of the function and connect it properly. */
1448 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1449 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1450 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1451 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1452 bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1453 bb->count = count;
1454 bb->frequency = BB_FREQ_MAX;
1455 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1456 e->count = count;
1457 e->probability = REG_BR_PROB_BASE;
1458 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1459 e->count = count;
1460 e->probability = REG_BR_PROB_BASE;
1461 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1462
1463 return bb;
1464 }
1465
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  BSI is the statement iterator the
   adjustment code is emitted after; it is left pointing at the last
   inserted statement.  Returns an SSA-friendly temporary holding the
   adjusted pointer.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk, apply the constant offset in place,
     before any vtable lookup.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily create the `pointer to vtable entry' type shared by all
	 thunks.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk the constant offset is applied after the
     virtual adjustment.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1569
1570 /* Expand thunk NODE to gimple if possible.
1571 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1572 no assembler is produced.
1573 When OUTPUT_ASM_THUNK is true, also produce assembler for
1574 thunks that are not lowered. */
1575
1576 bool
1577 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1578 {
1579 bool this_adjusting = thunk.this_adjusting;
1580 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1581 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1582 tree virtual_offset = NULL;
1583 tree alias = callees->callee->decl;
1584 tree thunk_fndecl = decl;
1585 tree a;
1586
1587 /* Instrumentation thunk is the same function with
1588 a different signature. Never need to expand it. */
1589 if (thunk.add_pointer_bounds_args)
1590 return false;
1591
1592 if (!force_gimple_thunk && this_adjusting
1593 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1594 virtual_value, alias))
1595 {
1596 const char *fnname;
1597 tree fn_block;
1598 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1599
1600 if (!output_asm_thunks)
1601 {
1602 analyzed = true;
1603 return false;
1604 }
1605
1606 if (in_lto_p)
1607 get_untransformed_body ();
1608 a = DECL_ARGUMENTS (thunk_fndecl);
1609
1610 current_function_decl = thunk_fndecl;
1611
1612 /* Ensure thunks are emitted in their correct sections. */
1613 resolve_unique_section (thunk_fndecl, 0,
1614 flag_function_sections);
1615
1616 DECL_RESULT (thunk_fndecl)
1617 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1618 RESULT_DECL, 0, restype);
1619 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1620 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1621
1622 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1623 create one. */
1624 fn_block = make_node (BLOCK);
1625 BLOCK_VARS (fn_block) = a;
1626 DECL_INITIAL (thunk_fndecl) = fn_block;
1627 allocate_struct_function (thunk_fndecl, false);
1628 init_function_start (thunk_fndecl);
1629 cfun->is_thunk = 1;
1630 insn_locations_init ();
1631 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1632 prologue_location = curr_insn_location ();
1633 assemble_start_function (thunk_fndecl, fnname);
1634
1635 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1636 fixed_offset, virtual_value, alias);
1637
1638 assemble_end_function (thunk_fndecl, fnname);
1639 insn_locations_finalize ();
1640 init_insn_lengths ();
1641 free_after_compilation (cfun);
1642 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1643 thunk.thunk_p = false;
1644 analyzed = false;
1645 }
1646 else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1647 {
1648 error ("generic thunk code fails for method %qD which uses %<...%>",
1649 thunk_fndecl);
1650 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1651 analyzed = true;
1652 return false;
1653 }
1654 else
1655 {
1656 tree restype;
1657 basic_block bb, then_bb, else_bb, return_bb;
1658 gimple_stmt_iterator bsi;
1659 int nargs = 0;
1660 tree arg;
1661 int i;
1662 tree resdecl;
1663 tree restmp = NULL;
1664 tree resbnd = NULL;
1665
1666 gcall *call;
1667 greturn *ret;
1668 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1669
1670 /* We may be called from expand_thunk that releses body except for
1671 DECL_ARGUMENTS. In this case force_gimple_thunk is true. */
1672 if (in_lto_p && !force_gimple_thunk)
1673 get_untransformed_body ();
1674 a = DECL_ARGUMENTS (thunk_fndecl);
1675
1676 current_function_decl = thunk_fndecl;
1677
1678 /* Ensure thunks are emitted in their correct sections. */
1679 resolve_unique_section (thunk_fndecl, 0,
1680 flag_function_sections);
1681
1682 DECL_IGNORED_P (thunk_fndecl) = 1;
1683 bitmap_obstack_initialize (NULL);
1684
1685 if (thunk.virtual_offset_p)
1686 virtual_offset = size_int (virtual_value);
1687
1688 /* Build the return declaration for the function. */
1689 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1690 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1691 {
1692 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1693 DECL_ARTIFICIAL (resdecl) = 1;
1694 DECL_IGNORED_P (resdecl) = 1;
1695 DECL_RESULT (thunk_fndecl) = resdecl;
1696 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1697 }
1698 else
1699 resdecl = DECL_RESULT (thunk_fndecl);
1700
1701 bb = then_bb = else_bb = return_bb
1702 = init_lowered_empty_function (thunk_fndecl, true, count);
1703
1704 bsi = gsi_start_bb (bb);
1705
1706 /* Build call to the function being thunked. */
1707 if (!VOID_TYPE_P (restype)
1708 && (!alias_is_noreturn || TREE_ADDRESSABLE (restype)))
1709 {
1710 if (DECL_BY_REFERENCE (resdecl))
1711 {
1712 restmp = gimple_fold_indirect_ref (resdecl);
1713 if (!restmp)
1714 restmp = build2 (MEM_REF,
1715 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1716 resdecl,
1717 build_int_cst (TREE_TYPE
1718 (DECL_RESULT (alias)), 0));
1719 }
1720 else if (!is_gimple_reg_type (restype))
1721 {
1722 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1723 {
1724 restmp = resdecl;
1725
1726 if (TREE_CODE (restmp) == VAR_DECL)
1727 add_local_decl (cfun, restmp);
1728 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1729 }
1730 else
1731 restmp = create_tmp_var (restype, "retval");
1732 }
1733 else
1734 restmp = create_tmp_reg (restype, "retval");
1735 }
1736
1737 for (arg = a; arg; arg = DECL_CHAIN (arg))
1738 nargs++;
1739 auto_vec<tree> vargs (nargs);
1740 i = 0;
1741 arg = a;
1742 if (this_adjusting)
1743 {
1744 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1745 virtual_offset));
1746 arg = DECL_CHAIN (a);
1747 i = 1;
1748 }
1749
1750 if (nargs)
1751 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1752 {
1753 tree tmp = arg;
1754 if (!is_gimple_val (arg))
1755 {
1756 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1757 (TREE_TYPE (arg)), "arg");
1758 gimple *stmt = gimple_build_assign (tmp, arg);
1759 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1760 }
1761 vargs.quick_push (tmp);
1762 }
1763 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1764 callees->call_stmt = call;
1765 gimple_call_set_from_thunk (call, true);
1766 gimple_call_set_with_bounds (call, instrumentation_clone);
1767
1768 /* Return slot optimization is always possible and in fact requred to
1769 return values with DECL_BY_REFERENCE. */
1770 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1771 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1772 || DECL_BY_REFERENCE (resdecl)))
1773 gimple_call_set_return_slot_opt (call, true);
1774
1775 if (restmp)
1776 {
1777 gimple_call_set_lhs (call, restmp);
1778 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1779 TREE_TYPE (TREE_TYPE (alias))));
1780 }
1781 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1782 if (!alias_is_noreturn)
1783 {
1784 if (instrumentation_clone
1785 && !DECL_BY_REFERENCE (resdecl)
1786 && restmp
1787 && BOUNDED_P (restmp))
1788 {
1789 resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1790 create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1791 as_a <gcall *> (gsi_stmt (bsi)),
1792 callees->count, callees->frequency);
1793 }
1794
1795 if (restmp && !this_adjusting
1796 && (fixed_offset || virtual_offset))
1797 {
1798 tree true_label = NULL_TREE;
1799
1800 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1801 {
1802 gimple *stmt;
1803 edge e;
1804 /* If the return type is a pointer, we need to
1805 protect against NULL. We know there will be an
1806 adjustment, because that's why we're emitting a
1807 thunk. */
1808 then_bb = create_basic_block (NULL, bb);
1809 then_bb->count = count - count / 16;
1810 then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1811 return_bb = create_basic_block (NULL, then_bb);
1812 return_bb->count = count;
1813 return_bb->frequency = BB_FREQ_MAX;
1814 else_bb = create_basic_block (NULL, else_bb);
1815 then_bb->count = count / 16;
1816 then_bb->frequency = BB_FREQ_MAX / 16;
1817 add_bb_to_loop (then_bb, bb->loop_father);
1818 add_bb_to_loop (return_bb, bb->loop_father);
1819 add_bb_to_loop (else_bb, bb->loop_father);
1820 remove_edge (single_succ_edge (bb));
1821 true_label = gimple_block_label (then_bb);
1822 stmt = gimple_build_cond (NE_EXPR, restmp,
1823 build_zero_cst (TREE_TYPE (restmp)),
1824 NULL_TREE, NULL_TREE);
1825 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1826 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1827 e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
1828 e->count = count - count / 16;
1829 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1830 e->probability = REG_BR_PROB_BASE / 16;
1831 e->count = count / 16;
1832 e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1833 e->probability = REG_BR_PROB_BASE;
1834 e->count = count;
1835 e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1836 e->probability = REG_BR_PROB_BASE;
1837 e->count = count - count / 16;
1838 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1839 e->probability = REG_BR_PROB_BASE;
1840 e->count = count / 16;
1841 bsi = gsi_last_bb (then_bb);
1842 }
1843
1844 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1845 fixed_offset, virtual_offset);
1846 if (true_label)
1847 {
1848 gimple *stmt;
1849 bsi = gsi_last_bb (else_bb);
1850 stmt = gimple_build_assign (restmp,
1851 build_zero_cst (TREE_TYPE (restmp)));
1852 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1853 bsi = gsi_last_bb (return_bb);
1854 }
1855 }
1856 else
1857 gimple_call_set_tail (call, true);
1858
1859 /* Build return value. */
1860 if (!DECL_BY_REFERENCE (resdecl))
1861 ret = gimple_build_return (restmp);
1862 else
1863 ret = gimple_build_return (resdecl);
1864 gimple_return_set_retbnd (ret, resbnd);
1865
1866 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1867 }
1868 else
1869 {
1870 gimple_call_set_tail (call, true);
1871 remove_edge (single_succ_edge (bb));
1872 }
1873
1874 cfun->gimple_df->in_ssa_p = true;
1875 profile_status_for_fn (cfun)
1876 = count ? PROFILE_READ : PROFILE_GUESSED;
1877 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1878 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1879 delete_unreachable_blocks ();
1880 update_ssa (TODO_update_ssa);
1881 checking_verify_flow_info ();
1882 free_dominance_info (CDI_DOMINATORS);
1883
1884 /* Since we want to emit the thunk, we explicitly mark its name as
1885 referenced. */
1886 thunk.thunk_p = false;
1887 lowered = true;
1888 bitmap_obstack_release (NULL);
1889 }
1890 current_function_decl = NULL;
1891 set_cfun (NULL);
1892 return true;
1893 }
1894
1895 /* Assemble thunks and aliases associated to node. */
1896
1897 void
1898 cgraph_node::assemble_thunks_and_aliases (void)
1899 {
1900 cgraph_edge *e;
1901 ipa_ref *ref;
1902
1903 for (e = callers; e;)
1904 if (e->caller->thunk.thunk_p
1905 && !e->caller->thunk.add_pointer_bounds_args)
1906 {
1907 cgraph_node *thunk = e->caller;
1908
1909 e = e->next_caller;
1910 thunk->expand_thunk (true, false);
1911 thunk->assemble_thunks_and_aliases ();
1912 }
1913 else
1914 e = e->next_caller;
1915
1916 FOR_EACH_ALIAS (this, ref)
1917 {
1918 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1919 if (!alias->transparent_alias)
1920 {
1921 bool saved_written = TREE_ASM_WRITTEN (decl);
1922
1923 /* Force assemble_alias to really output the alias this time instead
1924 of buffering it in same alias pairs. */
1925 TREE_ASM_WRITTEN (decl) = 1;
1926 do_assemble_alias (alias->decl,
1927 DECL_ASSEMBLER_NAME (decl));
1928 alias->assemble_thunks_and_aliases ();
1929 TREE_ASM_WRITTEN (decl) = saved_written;
1930 }
1931 }
1932 }
1933
/* Expand function specified by node: apply queued IPA transforms, run
   all GIMPLE and RTL passes on the body, emit the assembly, then free
   the body and drop call edges.  On return the function's code has
   been written out (asserted below).  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  /* Re-materialize the unmodified GIMPLE body (e.g. from LTO stream).  */
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  gcc_assert (DECL_STRUCT_FUNCTION (decl));
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply the transformations queued by the IPA passes for this node.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Report the exact size when it round-trips through unsigned
	     int; otherwise fall back to reporting the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  if (cfun)
    pop_cfun ();

  /* It would make a lot more sense to output thunks before function body
     to get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
2047
2048 /* Node comparer that is responsible for the order that corresponds
2049 to time when a function was launched for the first time. */
2050
2051 static int
2052 node_cmp (const void *pa, const void *pb)
2053 {
2054 const cgraph_node *a = *(const cgraph_node * const *) pa;
2055 const cgraph_node *b = *(const cgraph_node * const *) pb;
2056
2057 /* Functions with time profile must be before these without profile. */
2058 if (!a->tp_first_run || !b->tp_first_run)
2059 return a->tp_first_run - b->tp_first_run;
2060
2061 return a->tp_first_run != b->tp_first_run
2062 ? b->tp_first_run - a->tp_first_run
2063 : b->order - a->order;
2064 }
2065
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
expand_all_functions (void)
{
  cgraph_node *node;
  cgraph_node **order = XCNEWVEC (cgraph_node *,
				  symtab->cgraph_count);
  /* Counters used only for the dump-file statistics below.  */
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == symtab->cgraph_count);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* With -fprofile-reorder-functions, re-sort by time profile so that
     (given the reverse walk below) functions first executed earlier
     are expanded earlier.  */
  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);

  /* Walk back-to-front so callees tend to be expanded before callers
     (the array is a reverse postorder).  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
	{
	  expanded_func_count++;
	  if(node->tp_first_run)
	    profiled_func_count++;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Time profile order in expand_all_functions:%s:%d\n",
		     node->asm_name (), node->tp_first_run);
	  node->process = 0;
	  node->expand ();
	}
    }

  if (dump_file)
    fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
	     main_input_filename, profiled_func_count, expanded_func_count);

  if (symtab->dump_file && flag_profile_reorder_functions)
    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
	     profiled_func_count, expanded_func_count);

  /* Expanding functions may have queued new ones; process them now.  */
  symtab->process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
2130
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  /* Must be zero: output_in_order allocates its table with XCNEWVEC, so
     an untouched (zero-filled) slot reads as "no symbol at this order".  */
  ORDER_UNDEFINED = 0,
  ORDER_FUNCTION,	/* Slot holds a cgraph_node (union member f).  */
  ORDER_VAR,		/* Slot holds a varpool_node (union member v).  */
  ORDER_ASM		/* Slot holds a toplevel asm_node (union member a).  */
};
2140
/* One slot of the order-indexed table built by output_in_order.  The
   KIND tag selects which union member is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
2151
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.
   When NO_REORDER is true only do this for symbols marked no reorder.  */

static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* symtab->order bounds every order number ever assigned, so it is a
     safe table size; XCNEWVEC zero-fills, leaving unused slots at
     ORDER_UNDEFINED.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  /* Fill one slot per symbol, indexed by its order number.  Thunks and
     aliases are skipped here; they are emitted together with the
     function they belong to (see cgraph_node::expand).  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	  continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  /* Toplevel asm statements always participate.  */
  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit everything in ascending order-number order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
#ifdef ACCEL_COMPILER
	  /* Do not assemble "omp declare target link" vars.  */
	  if (DECL_HAS_VALUE_EXPR_P (nodes[i].u.v->decl)
	      && lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (nodes[i].u.v->decl)))
	    break;
#endif
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  /* Unused slot — nothing was recorded at this order number.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2245
/* Run the inter-procedural optimization pipeline: the small IPA passes,
   summary generation, optional LTO/offload bytecode stream-out, and —
   unless we are only streaming slim LTO — the regular IPA passes.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes operate on the whole program; make sure no function
     context is current.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* In LTO mode the small IPA passes already ran at compile time.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips the
	 PLUGIN_ALL_IPA_PASSES_END callback and the obstack release
	 below; presumably harmless as compilation is aborting.  */
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  /* Stream out summaries: first for offload targets, then for LTO.  */
  if (!in_lto_p)
    {
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Execute the regular IPA passes unless this compile only streams
     slim LTO bytecode for a later link-time run.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2318
2319
2320 /* Return string alias is alias of. */
2321
2322 static tree
2323 get_alias_symbol (tree decl)
2324 {
2325 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2326 return get_identifier (TREE_STRING_POINTER
2327 (TREE_VALUE (TREE_VALUE (alias))));
2328 }
2329
2330
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  /* Emit every weakref alias not yet written; for instrumented nodes,
     also require that the instrumented version is unwritten.  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab
	   reference, otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* Asserted not to happen; the get_alias_symbol call below is
	       a dead fallback kept after the assertion.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2368
/* Perform simple optimizations based on callgraph: run the IPA passes,
   then drive expansion and output of every function, variable, and
   toplevel asm, finishing with consistency checks.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

  symtab_node::checking_verify_symtab_nodes ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
  symtab_node::checking_verify_symtab_nodes ();

  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
     }
#endif

  state = EXPANSION;

  /* With -fno-toplevel-reorder everything is emitted in source order;
     otherwise no-reorder symbols go first, then the rest.  */
  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered. The process
	 flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
  if (!flag_checking)
    return;
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
}
2503
2504
/* Analyze the whole compilation unit once it is parsed completely:
   finalize pending functions and aliases, lower everything to GIMPLE,
   emit early debug, and hand off to the pass manager via compile ().  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions (/*first_time=*/true);

  /* Mark alias targets necessary and emit diagnostics — again,
     presumably for alias pairs added during the analysis above.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions (/*first_time=*/false);

  if (!seen_error ())
    {
      /* Emit early debug for reachable functions, and by consequence,
	 locally scoped symbols.  */
      struct cgraph_node *cnode;
      FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
	(*debug_hooks->early_global_decl) (cnode->decl);

      /* Clean up anything that needs cleaning up after initial debug
	 generation.  */
      (*debug_hooks->early_finish) ();
    }

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2563
2564 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2565 within the same process. For use by toplev::finalize. */
2566
2567 void
2568 cgraphunit_c_finalize (void)
2569 {
2570 gcc_assert (cgraph_new_nodes.length () == 0);
2571 cgraph_new_nodes.truncate (0);
2572
2573 vtable_entry_type = NULL;
2574 queued_nodes = &symtab_terminator;
2575
2576 first_analyzed = NULL;
2577 first_analyzed_var = NULL;
2578 }
2579
/* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
   kind of wrapper method.  The node's own body is discarded and replaced
   by a thunk that forwards to TARGET.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  /* Build a fresh struct function for the thunk body.  */
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  /* The wrapper's single call edge is to TARGET.  */
  create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
  callees->can_throw_external = !TREE_NOTHROW (target->decl);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Arguments are forwarded as-is, so none needs to live in memory.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2622
2623 #include "gt-cgraphunit.h"