]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cgraphunit.c
* lto-symtab.c (lto_cgraph_replace_node): Update.
[thirdparty/gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48         Here the call-graph construction and local function analysis take
49       place.  Bodies of unreachable functions are released to
50 conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75       callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97       that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105       really have been referenced by the produced code, so we catch more cases
106 where reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "output.h"
115 #include "rtl.h"
116 #include "tree-flow.h"
117 #include "tree-inline.h"
118 #include "langhooks.h"
119 #include "pointer-set.h"
120 #include "toplev.h"
121 #include "flags.h"
122 #include "ggc.h"
123 #include "debug.h"
124 #include "target.h"
125 #include "cgraph.h"
126 #include "diagnostic.h"
127 #include "tree-pretty-print.h"
128 #include "gimple-pretty-print.h"
129 #include "timevar.h"
130 #include "params.h"
131 #include "fibheap.h"
132 #include "intl.h"
133 #include "function.h"
134 #include "ipa-prop.h"
135 #include "gimple.h"
136 #include "tree-iterator.h"
137 #include "tree-pass.h"
138 #include "tree-dump.h"
139 #include "output.h"
140 #include "coverage.h"
141 #include "plugin.h"
142 #include "ipa-inline.h"
143 #include "ipa-utils.h"
144 #include "lto-streamer.h"
145 #include "except.h"
146 #include "regset.h" /* FIXME: For reg_obstack. */
147
148 static void cgraph_expand_all_functions (void);
149 static void cgraph_mark_functions_to_output (void);
150 static void cgraph_expand_function (struct cgraph_node *);
151 static void cgraph_output_pending_asms (void);
152 static void tree_rest_of_compilation (struct cgraph_node *);
153
154 FILE *cgraph_dump_file;
155
156 /* Used for vtable lookup in thunk adjusting. */
157 static GTY (()) tree vtable_entry_type;
158
159 /* Determine if function DECL is needed. That is, visible to something
160 either outside this translation unit, something magic in the system
161 configury. */
162
163 bool
164 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
165 {
166 /* If the user told us it is used, then it must be so. */
167 if (node->symbol.externally_visible)
168 return true;
169
170 /* ??? If the assembler name is set by hand, it is possible to assemble
171 the name later after finalizing the function and the fact is noticed
172 in assemble_name then. This is arguably a bug. */
173 if (DECL_ASSEMBLER_NAME_SET_P (decl)
174 && (!node->thunk.thunk_p && !node->same_body_alias)
175 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
176 return true;
177
178 /* With -fkeep-inline-functions we are keeping all inline functions except
179 for extern inline ones. */
180 if (flag_keep_inline_functions
181 && DECL_DECLARED_INLINE_P (decl)
182 && !DECL_EXTERNAL (decl)
183 && !DECL_DISREGARD_INLINE_LIMITS (decl))
184 return true;
185
186 /* If we decided it was needed before, but at the time we didn't have
187 the body of the function available, then it's still needed. We have
188 to go back and re-check its dependencies now. */
189 if (node->needed)
190 return true;
191
192 /* Externally visible functions must be output. The exception is
193 COMDAT functions that must be output only when they are needed.
194
195 When not optimizing, also output the static functions. (see
196 PR24561), but don't do so for always_inline functions, functions
197 declared inline and nested functions. These were optimized out
198 in the original implementation and it is unclear whether we want
199 to change the behavior here. */
200 if (((TREE_PUBLIC (decl)
201 || (!optimize
202 && !node->same_body_alias
203 && !DECL_DISREGARD_INLINE_LIMITS (decl)
204 && !DECL_DECLARED_INLINE_P (decl)
205 && !(DECL_CONTEXT (decl)
206 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
207 && !flag_whole_program
208 && !flag_lto)
209 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
210 return true;
211
212 return false;
213 }
214
215 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
216 functions into callgraph in a way so they look like ordinary reachable
217 functions inserted into callgraph already at construction time. */
218
219 bool
220 cgraph_process_new_functions (void)
221 {
222 bool output = false;
223 tree fndecl;
224 struct cgraph_node *node;
225
226 varpool_analyze_pending_decls ();
227 /* Note that this queue may grow as its being processed, as the new
228 functions may generate new ones. */
229 while (cgraph_new_nodes)
230 {
231 node = cgraph_new_nodes;
232 fndecl = node->symbol.decl;
233 cgraph_new_nodes = cgraph_new_nodes->next_needed;
234 switch (cgraph_state)
235 {
236 case CGRAPH_STATE_CONSTRUCTION:
237 /* At construction time we just need to finalize function and move
238 it into reachable functions list. */
239
240 node->next_needed = NULL;
241 cgraph_finalize_function (fndecl, false);
242 cgraph_mark_reachable_node (node);
243 output = true;
244 cgraph_call_function_insertion_hooks (node);
245 break;
246
247 case CGRAPH_STATE_IPA:
248 case CGRAPH_STATE_IPA_SSA:
249 /* When IPA optimization already started, do all essential
250 transformations that has been already performed on the whole
251 cgraph but not on this function. */
252
253 gimple_register_cfg_hooks ();
254 if (!node->analyzed)
255 cgraph_analyze_function (node);
256 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
257 current_function_decl = fndecl;
258 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
259 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
260 /* When not optimizing, be sure we run early local passes anyway
261 to expand OMP. */
262 || !optimize)
263 execute_pass_list (pass_early_local_passes.pass.sub);
264 else
265 compute_inline_parameters (node, true);
266 free_dominance_info (CDI_POST_DOMINATORS);
267 free_dominance_info (CDI_DOMINATORS);
268 pop_cfun ();
269 current_function_decl = NULL;
270 cgraph_call_function_insertion_hooks (node);
271 break;
272
273 case CGRAPH_STATE_EXPANSION:
274 /* Functions created during expansion shall be compiled
275 directly. */
276 node->process = 0;
277 cgraph_call_function_insertion_hooks (node);
278 cgraph_expand_function (node);
279 break;
280
281 default:
282 gcc_unreachable ();
283 break;
284 }
285 varpool_analyze_pending_decls ();
286 }
287 return output;
288 }
289
290 /* As an GCC extension we allow redefinition of the function. The
291 semantics when both copies of bodies differ is not well defined.
292 We replace the old body with new body so in unit at a time mode
293 we always use new body, while in normal mode we may end up with
294 old body inlined into some functions and new body expanded and
295 inlined in others.
296
297 ??? It may make more sense to use one body for inlining and other
298 body for expanding the function but this is difficult to do. */
299
300 static void
301 cgraph_reset_node (struct cgraph_node *node)
302 {
303 /* If node->process is set, then we have already begun whole-unit analysis.
304 This is *not* testing for whether we've already emitted the function.
305 That case can be sort-of legitimately seen with real function redefinition
306 errors. I would argue that the front end should never present us with
307 such a case, but don't enforce that for now. */
308 gcc_assert (!node->process);
309
310 /* Reset our data structures so we can analyze the function again. */
311 memset (&node->local, 0, sizeof (node->local));
312 memset (&node->global, 0, sizeof (node->global));
313 memset (&node->rtl, 0, sizeof (node->rtl));
314 node->analyzed = false;
315 node->local.finalized = false;
316
317 cgraph_node_remove_callees (node);
318 }
319
320 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
321 logic in effect. If NESTED is true, then our caller cannot stand to have
322 the garbage collector run at the moment. We would need to either create
323 a new GC context, or just not compile right now. */
324
325 void
326 cgraph_finalize_function (tree decl, bool nested)
327 {
328 struct cgraph_node *node = cgraph_get_create_node (decl);
329
330 if (node->local.finalized)
331 {
332 cgraph_reset_node (node);
333 node->local.redefined_extern_inline = true;
334 }
335
336 notice_global_symbol (decl);
337 node->local.finalized = true;
338 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
339
340 if (cgraph_decide_is_function_needed (node, decl))
341 cgraph_mark_needed_node (node);
342
343 /* Since we reclaim unreachable nodes at the end of every language
344 level unit, we need to be conservative about possible entry points
345 there. */
346 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
347 || DECL_STATIC_CONSTRUCTOR (decl)
348 || DECL_STATIC_DESTRUCTOR (decl)
349 /* COMDAT virtual functions may be referenced by vtable from
350 other compilation unit. Still we want to devirtualize calls
351 to those so we need to analyze them.
352 FIXME: We should introduce may edges for this purpose and update
353 their handling in unreachable function removal and inliner too. */
354 || (DECL_VIRTUAL_P (decl)
355 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
356 cgraph_mark_reachable_node (node);
357
358 /* If we've not yet emitted decl, tell the debug info about it. */
359 if (!TREE_ASM_WRITTEN (decl))
360 (*debug_hooks->deferred_inline_function) (decl);
361
362 /* Possibly warn about unused parameters. */
363 if (warn_unused_parameter)
364 do_warn_unused_parameter (decl);
365
366 if (!nested)
367 ggc_collect ();
368 }
369
370 /* Add the function FNDECL to the call graph.
371 Unlike cgraph_finalize_function, this function is intended to be used
372 by middle end and allows insertion of new function at arbitrary point
373 of compilation. The function can be either in high, low or SSA form
374 GIMPLE.
375
376 The function is assumed to be reachable and have address taken (so no
377 API breaking optimizations are performed on it).
378
379 Main work done by this function is to enqueue the function for later
380 processing to avoid need the passes to be re-entrant. */
381
382 void
383 cgraph_add_new_function (tree fndecl, bool lowered)
384 {
385 struct cgraph_node *node;
386 switch (cgraph_state)
387 {
388 case CGRAPH_STATE_CONSTRUCTION:
389 /* Just enqueue function to be processed at nearest occurrence. */
390 node = cgraph_create_node (fndecl);
391 node->next_needed = cgraph_new_nodes;
392 if (lowered)
393 node->lowered = true;
394 cgraph_new_nodes = node;
395 break;
396
397 case CGRAPH_STATE_IPA:
398 case CGRAPH_STATE_IPA_SSA:
399 case CGRAPH_STATE_EXPANSION:
400 /* Bring the function into finalized state and enqueue for later
401 analyzing and compilation. */
402 node = cgraph_get_create_node (fndecl);
403 node->local.local = false;
404 node->local.finalized = true;
405 node->reachable = node->needed = true;
406 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
407 {
408 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
409 current_function_decl = fndecl;
410 gimple_register_cfg_hooks ();
411 bitmap_obstack_initialize (NULL);
412 execute_pass_list (all_lowering_passes);
413 execute_pass_list (pass_early_local_passes.pass.sub);
414 bitmap_obstack_release (NULL);
415 pop_cfun ();
416 current_function_decl = NULL;
417
418 lowered = true;
419 }
420 if (lowered)
421 node->lowered = true;
422 node->next_needed = cgraph_new_nodes;
423 cgraph_new_nodes = node;
424 break;
425
426 case CGRAPH_STATE_FINISHED:
427 /* At the very end of compilation we have to do all the work up
428 to expansion. */
429 node = cgraph_create_node (fndecl);
430 if (lowered)
431 node->lowered = true;
432 cgraph_analyze_function (node);
433 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
434 current_function_decl = fndecl;
435 gimple_register_cfg_hooks ();
436 bitmap_obstack_initialize (NULL);
437 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
438 execute_pass_list (pass_early_local_passes.pass.sub);
439 bitmap_obstack_release (NULL);
440 tree_rest_of_compilation (node);
441 pop_cfun ();
442 current_function_decl = NULL;
443 break;
444
445 default:
446 gcc_unreachable ();
447 }
448
449 /* Set a personality if required and we already passed EH lowering. */
450 if (lowered
451 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
452 == eh_personality_lang))
453 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
454 }
455
456 /* C99 extern inline keywords allow changing of declaration after function
457 has been finalized. We need to re-decide if we want to mark the function as
458 needed then. */
459
460 void
461 cgraph_mark_if_needed (tree decl)
462 {
463 struct cgraph_node *node = cgraph_get_node (decl);
464 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
465 cgraph_mark_needed_node (node);
466 }
467
468 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
469 static bool
470 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
471 {
472 node = cgraph_function_or_thunk_node (node, NULL);
473 node2 = cgraph_function_or_thunk_node (node2, NULL);
474 while (node != node2 && node2)
475 node2 = node2->clone_of;
476 return node2 != NULL;
477 }
478
479 /* Verify edge E count and frequency. */
480
481 static bool
482 verify_edge_count_and_frequency (struct cgraph_edge *e)
483 {
484 bool error_found = false;
485 if (e->count < 0)
486 {
487 error ("caller edge count is negative");
488 error_found = true;
489 }
490 if (e->frequency < 0)
491 {
492 error ("caller edge frequency is negative");
493 error_found = true;
494 }
495 if (e->frequency > CGRAPH_FREQ_MAX)
496 {
497 error ("caller edge frequency is too large");
498 error_found = true;
499 }
500 if (gimple_has_body_p (e->caller->symbol.decl)
501 && !e->caller->global.inlined_to
502 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
503 Remove this once edges are actualy removed from the function at that time. */
504 && (e->frequency
505 || (inline_edge_summary_vec
506 && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
507 <= (unsigned) e->uid)
508 || !inline_edge_summary (e)->predicate)))
509 && (e->frequency
510 != compute_call_stmt_bb_frequency (e->caller->symbol.decl,
511 gimple_bb (e->call_stmt))))
512 {
513 error ("caller edge frequency %i does not match BB frequency %i",
514 e->frequency,
515 compute_call_stmt_bb_frequency (e->caller->symbol.decl,
516 gimple_bb (e->call_stmt)));
517 error_found = true;
518 }
519 return error_found;
520 }
521
522 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
523 static void
524 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
525 {
526 /* debug_gimple_stmt needs correct cfun */
527 if (cfun != this_cfun)
528 set_cfun (this_cfun);
529 debug_gimple_stmt (stmt);
530 }
531
532 /* Verify that call graph edge E corresponds to DECL from the associated
533 statement. Return true if the verification should fail. */
534
535 static bool
536 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
537 {
538 struct cgraph_node *node;
539
540 if (!decl || e->callee->global.inlined_to)
541 return false;
542 node = cgraph_get_node (decl);
543
544 /* We do not know if a node from a different partition is an alias or what it
545 aliases and therefore cannot do the former_clone_of check reliably. */
546 if (!node || node->symbol.in_other_partition)
547 return false;
548 node = cgraph_function_or_thunk_node (node, NULL);
549
550 if ((e->callee->former_clone_of != node->symbol.decl
551 && (!node->same_body_alias
552 || e->callee->former_clone_of != node->thunk.alias))
553 /* IPA-CP sometimes redirect edge to clone and then back to the former
554 function. This ping-pong has to go, eventually. */
555 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
556 && !clone_of_p (node, e->callee)
557 /* If decl is a same body alias of some other decl, allow e->callee to be
558 a clone of a clone of that other decl too. */
559 && (!node->same_body_alias
560 || !clone_of_p (cgraph_get_node (node->thunk.alias), e->callee)))
561 return true;
562 else
563 return false;
564 }
565
566 /* Verify cgraph nodes of given cgraph node. */
567 DEBUG_FUNCTION void
568 verify_cgraph_node (struct cgraph_node *node)
569 {
570 struct cgraph_edge *e;
571 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->symbol.decl);
572 basic_block this_block;
573 gimple_stmt_iterator gsi;
574 bool error_found = false;
575
576 if (seen_error ())
577 return;
578
579 timevar_push (TV_CGRAPH_VERIFY);
580 for (e = node->callees; e; e = e->next_callee)
581 if (e->aux)
582 {
583 error ("aux field set for edge %s->%s",
584 identifier_to_locale (cgraph_node_name (e->caller)),
585 identifier_to_locale (cgraph_node_name (e->callee)));
586 error_found = true;
587 }
588 if (node->count < 0)
589 {
590 error ("execution count is negative");
591 error_found = true;
592 }
593 if (node->global.inlined_to && node->symbol.externally_visible)
594 {
595 error ("externally visible inline clone");
596 error_found = true;
597 }
598 if (node->global.inlined_to && node->symbol.address_taken)
599 {
600 error ("inline clone with address taken");
601 error_found = true;
602 }
603 if (node->global.inlined_to && node->needed)
604 {
605 error ("inline clone is needed");
606 error_found = true;
607 }
608 for (e = node->indirect_calls; e; e = e->next_callee)
609 {
610 if (e->aux)
611 {
612 error ("aux field set for indirect edge from %s",
613 identifier_to_locale (cgraph_node_name (e->caller)));
614 error_found = true;
615 }
616 if (!e->indirect_unknown_callee
617 || !e->indirect_info)
618 {
619 error ("An indirect edge from %s is not marked as indirect or has "
620 "associated indirect_info, the corresponding statement is: ",
621 identifier_to_locale (cgraph_node_name (e->caller)));
622 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
623 error_found = true;
624 }
625 }
626 for (e = node->callers; e; e = e->next_caller)
627 {
628 if (verify_edge_count_and_frequency (e))
629 error_found = true;
630 if (!e->inline_failed)
631 {
632 if (node->global.inlined_to
633 != (e->caller->global.inlined_to
634 ? e->caller->global.inlined_to : e->caller))
635 {
636 error ("inlined_to pointer is wrong");
637 error_found = true;
638 }
639 if (node->callers->next_caller)
640 {
641 error ("multiple inline callers");
642 error_found = true;
643 }
644 }
645 else
646 if (node->global.inlined_to)
647 {
648 error ("inlined_to pointer set for noninline callers");
649 error_found = true;
650 }
651 }
652 for (e = node->indirect_calls; e; e = e->next_callee)
653 if (verify_edge_count_and_frequency (e))
654 error_found = true;
655 if (!node->callers && node->global.inlined_to)
656 {
657 error ("inlined_to pointer is set but no predecessors found");
658 error_found = true;
659 }
660 if (node->global.inlined_to == node)
661 {
662 error ("inlined_to pointer refers to itself");
663 error_found = true;
664 }
665
666 if (!cgraph_get_node (node->symbol.decl))
667 {
668 error ("node not found in cgraph_hash");
669 error_found = true;
670 }
671
672 if (node->clone_of)
673 {
674 struct cgraph_node *n;
675 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
676 if (n == node)
677 break;
678 if (!n)
679 {
680 error ("node has wrong clone_of");
681 error_found = true;
682 }
683 }
684 if (node->clones)
685 {
686 struct cgraph_node *n;
687 for (n = node->clones; n; n = n->next_sibling_clone)
688 if (n->clone_of != node)
689 break;
690 if (n)
691 {
692 error ("node has wrong clone list");
693 error_found = true;
694 }
695 }
696 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
697 {
698 error ("node is in clone list but it is not clone");
699 error_found = true;
700 }
701 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
702 {
703 error ("node has wrong prev_clone pointer");
704 error_found = true;
705 }
706 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
707 {
708 error ("double linked list of clones corrupted");
709 error_found = true;
710 }
711 if (node->symbol.same_comdat_group)
712 {
713 symtab_node n = node->symbol.same_comdat_group;
714
715 if (!DECL_ONE_ONLY (n->symbol.decl))
716 {
717 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
718 error_found = true;
719 }
720 if (n == (symtab_node)node)
721 {
722 error ("node is alone in a comdat group");
723 error_found = true;
724 }
725 do
726 {
727 if (!n->symbol.same_comdat_group)
728 {
729 error ("same_comdat_group is not a circular list");
730 error_found = true;
731 break;
732 }
733 n = n->symbol.same_comdat_group;
734 }
735 while (n != (symtab_node)node);
736 }
737
738 if (node->analyzed && node->alias)
739 {
740 bool ref_found = false;
741 int i;
742 struct ipa_ref *ref;
743
744 if (node->callees)
745 {
746 error ("Alias has call edges");
747 error_found = true;
748 }
749 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
750 i, ref); i++)
751 if (ref->use != IPA_REF_ALIAS)
752 {
753 error ("Alias has non-alias reference");
754 error_found = true;
755 }
756 else if (ref_found)
757 {
758 error ("Alias has more than one alias reference");
759 error_found = true;
760 }
761 else
762 ref_found = true;
763 if (!ref_found)
764 {
765 error ("Analyzed alias has no reference");
766 error_found = true;
767 }
768 }
769 if (node->analyzed && node->thunk.thunk_p)
770 {
771 if (!node->callees)
772 {
773 error ("No edge out of thunk node");
774 error_found = true;
775 }
776 else if (node->callees->next_callee)
777 {
778 error ("More than one edge out of thunk node");
779 error_found = true;
780 }
781 if (gimple_has_body_p (node->symbol.decl))
782 {
783 error ("Thunk is not supposed to have body");
784 error_found = true;
785 }
786 }
787 else if (node->analyzed && gimple_has_body_p (node->symbol.decl)
788 && !TREE_ASM_WRITTEN (node->symbol.decl)
789 && (!DECL_EXTERNAL (node->symbol.decl) || node->global.inlined_to)
790 && !flag_wpa)
791 {
792 if (this_cfun->cfg)
793 {
794 /* The nodes we're interested in are never shared, so walk
795 the tree ignoring duplicates. */
796 struct pointer_set_t *visited_nodes = pointer_set_create ();
797 /* Reach the trees by walking over the CFG, and note the
798 enclosing basic-blocks in the call edges. */
799 FOR_EACH_BB_FN (this_block, this_cfun)
800 for (gsi = gsi_start_bb (this_block);
801 !gsi_end_p (gsi);
802 gsi_next (&gsi))
803 {
804 gimple stmt = gsi_stmt (gsi);
805 if (is_gimple_call (stmt))
806 {
807 struct cgraph_edge *e = cgraph_edge (node, stmt);
808 tree decl = gimple_call_fndecl (stmt);
809 if (e)
810 {
811 if (e->aux)
812 {
813 error ("shared call_stmt:");
814 cgraph_debug_gimple_stmt (this_cfun, stmt);
815 error_found = true;
816 }
817 if (!e->indirect_unknown_callee)
818 {
819 if (verify_edge_corresponds_to_fndecl (e, decl))
820 {
821 error ("edge points to wrong declaration:");
822 debug_tree (e->callee->symbol.decl);
823 fprintf (stderr," Instead of:");
824 debug_tree (decl);
825 error_found = true;
826 }
827 }
828 else if (decl)
829 {
830 error ("an indirect edge with unknown callee "
831 "corresponding to a call_stmt with "
832 "a known declaration:");
833 error_found = true;
834 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
835 }
836 e->aux = (void *)1;
837 }
838 else if (decl)
839 {
840 error ("missing callgraph edge for call stmt:");
841 cgraph_debug_gimple_stmt (this_cfun, stmt);
842 error_found = true;
843 }
844 }
845 }
846 pointer_set_destroy (visited_nodes);
847 }
848 else
849 /* No CFG available?! */
850 gcc_unreachable ();
851
852 for (e = node->callees; e; e = e->next_callee)
853 {
854 if (!e->aux)
855 {
856 error ("edge %s->%s has no corresponding call_stmt",
857 identifier_to_locale (cgraph_node_name (e->caller)),
858 identifier_to_locale (cgraph_node_name (e->callee)));
859 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
860 error_found = true;
861 }
862 e->aux = 0;
863 }
864 for (e = node->indirect_calls; e; e = e->next_callee)
865 {
866 if (!e->aux)
867 {
868 error ("an indirect edge from %s has no corresponding call_stmt",
869 identifier_to_locale (cgraph_node_name (e->caller)));
870 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
871 error_found = true;
872 }
873 e->aux = 0;
874 }
875 }
876 if (error_found)
877 {
878 dump_cgraph_node (stderr, node);
879 internal_error ("verify_cgraph_node failed");
880 }
881 timevar_pop (TV_CGRAPH_VERIFY);
882 }
883
884 /* Verify whole cgraph structure. */
885 DEBUG_FUNCTION void
886 verify_cgraph (void)
887 {
888 struct cgraph_node *node;
889
890 if (seen_error ())
891 return;
892
893 FOR_EACH_FUNCTION (node)
894 verify_cgraph_node (node);
895 }
896
897 /* Output all asm statements we have stored up to be output. */
898
899 static void
900 cgraph_output_pending_asms (void)
901 {
902 struct cgraph_asm_node *can;
903
904 if (seen_error ())
905 return;
906
907 for (can = cgraph_asm_nodes; can; can = can->next)
908 assemble_asm (can->asm_str);
909 cgraph_asm_nodes = NULL;
910 }
911
912 /* Analyze the function scheduled to be output. */
913 void
914 cgraph_analyze_function (struct cgraph_node *node)
915 {
916 tree save = current_function_decl;
917 tree decl = node->symbol.decl;
918
919 if (node->alias && node->thunk.alias)
920 {
921 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
922 struct cgraph_node *n;
923
924 for (n = tgt; n && n->alias;
925 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
926 if (n == node)
927 {
928 error ("function %q+D part of alias cycle", node->symbol.decl);
929 node->alias = false;
930 return;
931 }
932 if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
933 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
934 IPA_REF_ALIAS, NULL);
935 if (node->same_body_alias)
936 {
937 DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (node->thunk.alias);
938 DECL_DECLARED_INLINE_P (node->symbol.decl)
939 = DECL_DECLARED_INLINE_P (node->thunk.alias);
940 DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
941 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
942 }
943
944 /* Fixup visibility nonsences C++ frontend produce on same body aliases. */
945 if (TREE_PUBLIC (node->symbol.decl) && node->same_body_alias)
946 {
947 DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (node->thunk.alias);
948 if (DECL_ONE_ONLY (node->thunk.alias))
949 {
950 DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (node->thunk.alias);
951 DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (node->thunk.alias);
952 if (DECL_ONE_ONLY (node->thunk.alias) && !node->symbol.same_comdat_group)
953 {
954 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
955 node->symbol.same_comdat_group = (symtab_node)tgt;
956 if (!tgt->symbol.same_comdat_group)
957 tgt->symbol.same_comdat_group = (symtab_node)node;
958 else
959 {
960 symtab_node n;
961 for (n = tgt->symbol.same_comdat_group;
962 n->symbol.same_comdat_group != (symtab_node)tgt;
963 n = n->symbol.same_comdat_group)
964 ;
965 n->symbol.same_comdat_group = (symtab_node)node;
966 }
967 }
968 }
969 }
970 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
971 if (node->symbol.address_taken)
972 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
973 if (cgraph_decide_is_function_needed (node, node->symbol.decl))
974 cgraph_mark_needed_node (node);
975 }
976 else if (node->thunk.thunk_p)
977 {
978 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
979 NULL, 0, CGRAPH_FREQ_BASE);
980 }
981 else
982 {
983 current_function_decl = decl;
984 push_cfun (DECL_STRUCT_FUNCTION (decl));
985
986 assign_assembler_name_if_neeeded (node->symbol.decl);
987
988 /* Make sure to gimplify bodies only once. During analyzing a
989 function we lower it, which will require gimplified nested
990 functions, so we can end up here with an already gimplified
991 body. */
992 if (!gimple_body (decl))
993 gimplify_function_tree (decl);
994 dump_function (TDI_generic, decl);
995
996 /* Lower the function. */
997 if (!node->lowered)
998 {
999 if (node->nested)
1000 lower_nested_functions (node->symbol.decl);
1001 gcc_assert (!node->nested);
1002
1003 gimple_register_cfg_hooks ();
1004 bitmap_obstack_initialize (NULL);
1005 execute_pass_list (all_lowering_passes);
1006 free_dominance_info (CDI_POST_DOMINATORS);
1007 free_dominance_info (CDI_DOMINATORS);
1008 compact_blocks ();
1009 bitmap_obstack_release (NULL);
1010 node->lowered = true;
1011 }
1012
1013 pop_cfun ();
1014 }
1015 node->analyzed = true;
1016
1017 current_function_decl = save;
1018 }
1019
/* The C++ front end produces same-body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their function in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, and once the C++ FE
   is sure it will not stream PCH we build the links via this function.  */
1025
1026 void
1027 cgraph_process_same_body_aliases (void)
1028 {
1029 struct cgraph_node *node;
1030 FOR_EACH_FUNCTION (node)
1031 if (node->same_body_alias
1032 && !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
1033 {
1034 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
1035 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
1036 IPA_REF_ALIAS, NULL);
1037 }
1038 same_body_aliases_done = true;
1039 }
1040
1041 /* Process attributes common for vars and functions. */
1042
1043 static void
1044 process_common_attributes (tree decl)
1045 {
1046 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
1047
1048 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
1049 {
1050 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1051 "%<weakref%> attribute should be accompanied with"
1052 " an %<alias%> attribute");
1053 DECL_WEAK (decl) = 0;
1054 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1055 DECL_ATTRIBUTES (decl));
1056 }
1057 }
1058
1059 /* Look for externally_visible and used attributes and mark cgraph nodes
1060 accordingly.
1061
1062 We cannot mark the nodes at the point the attributes are processed (in
1063 handle_*_attribute) because the copy of the declarations available at that
1064 point may not be canonical. For example, in:
1065
1066 void f();
1067 void f() __attribute__((used));
1068
1069 the declaration we see in handle_used_attribute will be the second
1070 declaration -- but the front end will subsequently merge that declaration
1071 with the original declaration and discard the second declaration.
1072
1073 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
1074
1075 void f() {}
1076 void f() __attribute__((externally_visible));
1077
1078 is valid.
1079
1080 So, we walk the nodes at the end of the translation unit, applying the
1081 attributes at that point. */
1082
static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Only the functions added since the previous invocation are scanned;
     FIRST is where the previous scan stopped.  */
  for (node = cgraph_first_function (); node != first;
       node = cgraph_next_function (node))
    {
      tree decl = node->symbol.decl;
      /* "used" attribute: keep the function even if nothing references it.  */
      if (DECL_PRESERVE_P (decl))
	cgraph_mark_needed_node (node);
      /* dllexport on targets with dllimport/dllexport semantics forces
	 output of finalized public functions.  */
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
	  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
	  && TREE_PUBLIC (node->symbol.decl))
	{
	  if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  /* The attribute is meaningless on non-public symbols; warn.  */
	  if (! TREE_PUBLIC (node->symbol.decl))
	    warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	  else if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      /* "weakref" on a function that actually has a definition (and is not
	 an alias) cannot be honored; warn and strip it together with the
	 weak flag.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->local.finalized && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      /* "always_inline" on a function not declared inline is only a hint
	 the inliner may not be able to honor; tell the user.  */
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  /* Likewise for variables added since FIRST_VAR.  */
  for (vnode = varpool_first_variable (); vnode != first_var;
       vnode = varpool_next_variable (vnode))
    {
      tree decl = vnode->symbol.decl;
      /* "used" attribute: force output of the variable.  */
      if (DECL_PRESERVE_P (decl))
	{
	  vnode->force_output = true;
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
	  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
	  && TREE_PUBLIC (vnode->symbol.decl))
	{
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->symbol.decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	  else if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      /* "weakref" on a variable with an initializer is a definition; the
	 attribute cannot be honored, so warn and strip it.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->finalized
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}
1172
1173 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
1174 each reachable functions) and build cgraph.
1175 The function can be called multiple times after inserting new nodes
1176 into beginning of queue. Just the new part of queue is re-scanned then. */
1177
static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_first_function ();
  first_analyzed_var = varpool_first_variable ();
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_first_function (); node != first_analyzed;
	   node = cgraph_next_function (node))
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->symbol.decl;

      /* Pop the head of the queue.  */
      node = cgraph_nodes_queue;
      x_cgraph_nodes_queue = (symtab_node)cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl)
	  && (!node->alias || !node->thunk.alias)
	  && !node->thunk.thunk_p)
	{
	  cgraph_reset_node (node);
	  node->local.redefined_extern_inline = true;
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Everything this node calls becomes reachable; so do thunks that
	 call it.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);
      for (edge = node->callers; edge; edge = edge->next_caller)
	if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
	  cgraph_mark_reachable_node (edge->caller);

      /* A reachable comdat member drags the whole group in (the ring is
	 walked via the circular same_comdat_group links).  */
      if (node->symbol.same_comdat_group)
	{
	  for (next = cgraph (node->symbol.same_comdat_group);
	       next != node;
	       next = cgraph (next->symbol.same_comdat_group))
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body. The DECL_INITIAL() of that
	 abstract function declaration will be later needed to output debug
	 info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node;
	  origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_first_function ();
      first_analyzed_var = varpool_first_variable ();
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_first_function (); node != first_analyzed;
	   node = cgraph_next_function (node))
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_symtab (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove the bodies (and nodes) of everything that was never reached;
     NEXT is fetched before any possible removal of NODE.  */
  for (node = cgraph_first_function (); node != first_analyzed;
       node = next)
    {
      tree decl = node->symbol.decl;
      next = cgraph_next_function (node);

      if (node->local.finalized && !gimple_has_body_p (decl)
	  && (!node->alias || !node->thunk.alias)
	  && !node->thunk.thunk_p)
	cgraph_reset_node (node);

      if (!node->reachable
	  && (gimple_has_body_p (decl) || node->thunk.thunk_p
	      || (node->alias && node->thunk.alias)))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || node->thunk.thunk_p
		  || node->alias
		  || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_symtab (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_first_function ();
  ggc_collect ();
}
1320
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */
1324
static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;
  struct cgraph_node *target_node;
  struct cgraph_node *src_node;
  struct varpool_node *target_vnode;

  /* Handled pairs are deleted with VEC_unordered_remove, which moves the
     last element into slot I; therefore I only advances in the unhandled
     case.  */
  for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
    {
      /* Function alias whose target function is known in this unit.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && (target_node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  src_node = cgraph_get_node (p->decl);
	  /* The alias replaces any previously finalized body of DECL.  */
	  if (src_node && src_node->local.finalized)
            cgraph_reset_node (src_node);
	  /* Normally EXTERNAL flag is used to mark external inlines,
	     however for aliases it seems to be allowed to use it w/o
	     any meaning. See gcc.dg/attr-alias-3.c
	     However for weakref we insist on EXTERNAL flag being set.
	     See gcc.dg/attr-alias-5.c  */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  cgraph_create_function_alias (p->decl, target_node->symbol.decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Variable alias whose target variable is known in this unit.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  /* Normally EXTERNAL flag is used to mark external inlines,
	     however for aliases it seems to be allowed to use it w/o
	     any meaning. See gcc.dg/attr-alias-3.c
	     However for weakref we insist on EXTERNAL flag being set.
	     See gcc.dg/attr-alias-5.c  */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  varpool_create_variable_alias (p->decl, target_vnode->symbol.decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Weakrefs with target not defined in current unit are easy to handle; they
	 behave just as external variables except we need to note the alias flag
	 to later output the weakref pseudo op into asm file.
	 NOTE(review): the ternary below probes the OPPOSITE namespace on
	 purpose — for a FUNCTION_DECL the cgraph lookup already failed in
	 the first branch above, so a NULL varpool lookup here means the
	 target is defined nowhere in this unit (and symmetrically for
	 VAR_DECL) — confirm this reading against the symtab rewrite.  */
      else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
	       && (TREE_CODE (p->decl) == FUNCTION_DECL
	           ? (varpool_node_for_asm (p->target) == NULL)
	           : (cgraph_node_for_asm (p->target) == NULL)))
	{
	  if (TREE_CODE (p->decl) == FUNCTION_DECL)
	    cgraph_get_create_node (p->decl)->alias = true;
	  else
	    varpool_get_node (p->decl)->alias = true;
	  DECL_EXTERNAL (p->decl) = 1;
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      else
	{
	  /* Pair not handled yet; leave it in the vector for later.  */
	  if (dump_file)
	    fprintf (dump_file, "Unhandled alias %s->%s\n",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		     IDENTIFIER_POINTER (p->target));

	  i++;
	}
    }
}
1395
1396
1397 /* Figure out what functions we want to assemble. */
1398
static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for processing before we start.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->symbol.decl;
      struct cgraph_edge *e;

      /* A node already marked here can only have been dragged in via its
	 comdat group below.  */
      gcc_assert (!node->process || node->symbol.same_comdat_group);
      if (node->process)
	continue;

      /* E ends up pointing at the first caller whose call was NOT
	 inlined, if any — i.e. a real out-of-line use of this body.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || ((e || ipa_ref_has_aliases_p (&node->symbol.ref_list))
		  && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Outputting one member of a comdat group forces the rest of
	     the group out too (except thunks and aliases).  */
	  if (node->symbol.same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->symbol.same_comdat_group);
		   next != node;
		   next = cgraph (next->symbol.same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->symbol.same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Verify later that no group member was left half-marked.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !node->alias
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->symbol.in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->symbol.same_comdat_group && !node->process)
	{
	  tree decl = node->symbol.decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1500
1501 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1502 in lowered gimple form.
1503
1504 Set current_function_decl and cfun to newly constructed empty function body.
1505 return basic block in the function body. */
1506
static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  /* Make DECL the function being compiled and set up an empty body that
     is already in lowered GIMPLE/SSA form.  The order of these calls
     matters: the struct function must exist before CFG/SSA init.  */
  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark DECL as having a body and advertise the pass properties an
     already-lowered SSA function provides, so lowering is skipped.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa | PROP_gimple_any);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1533
1534 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1535 offset indicated by VIRTUAL_OFFSET, if that is
1536 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1537 zero for a result adjusting thunk. */
1538
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied before the
     virtual-offset lookup (the vtable is found through the adjusted
     `this').  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	      (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						     ptr,
						     fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the global pointer-to-vtable-entry type.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk the constant offset is applied last,
     after the virtual adjustment.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1647
1648 /* Produce assembler for thunk NODE. */
1649
static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->symbol.decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Ensure thunks are emitted in their correct sections.  */
  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

  /* Fast path: let the target emit the thunk directly as assembly when
     it knows how (this-adjusting thunks only).  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Slow path: build an ordinary GIMPLE body for the thunk — adjust
	 `this', tail-call the aliased function, and possibly adjust the
	 returned pointer — then hand it to the compilation pipeline.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') may need adjusting; the remaining
	 arguments are forwarded unchanged.  */
      if (this_adjusting)
	VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
	VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
	gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
	{
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					build_zero_cst (TREE_TYPE (restmp)),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* In the NULL case the adjustment is skipped and NULL is
		 returned unchanged.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
	gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      cgraph_node_remove_callees (node);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1830
1831
1832
/* Assemble thunks and aliases associated with NODE.  */
1834
static void
assemble_thunks_and_aliases (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  int i;
  struct ipa_ref *ref;

  /* Thunks appear as callers of NODE.  E must be advanced BEFORE the
     thunk is assembled: assemble_thunk removes the thunk's call edges
     (via cgraph_node_remove_callees), which includes E itself.  */
  for (e = node->callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	struct cgraph_node *thunk = e->caller;

	e = e->next_caller;
	/* Recurse first so thunks-of-thunks are emitted too.  */
	assemble_thunks_and_aliases (thunk);
	assemble_thunk (thunk);
      }
    else
      e = e->next_caller;
  /* Aliases appear as IPA_REF_ALIAS references pointing at NODE.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
					      i, ref); i++)
    if (ref->use == IPA_REF_ALIAS)
      {
	struct cgraph_node *alias = ipa_ref_referring_node (ref);
	bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);

	/* Force assemble_alias to really output the alias this time instead
	   of buffering it in same alias pairs.  */
	TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
	assemble_alias (alias->symbol.decl,
			DECL_ASSEMBLER_NAME (alias->thunk.alias));
	/* The alias may itself have thunks and aliases hanging off it.  */
	assemble_thunks_and_aliases (alias);
	TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
      }
}
1869
1870 /* Perform IPA transforms and all further optimizations and compilation
1871 for FNDECL. */
1872
static void
tree_rest_of_compilation (struct cgraph_node *node)
{
  tree fndecl = node->symbol.decl;
  location_t saved_loc;

  timevar_push (TV_REST_OF_COMPILATION);

  /* IPA transforms below require the global cgraph info to be final.  */
  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply the queued per-function IPA transformations first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  set_cfun (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Report the exact size only if it survived the truncation to
	     unsigned int; otherwise fall back to the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
	}
    }

  /* The GIMPLE body is no longer needed once RTL has been produced.  */
  gimple_set_body (fndecl, NULL);
  if (DECL_STRUCT_FUNCTION (fndecl) == 0
      && !cgraph_get_node (fndecl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (fndecl) != 0)
	DECL_INITIAL (fndecl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);
}
1957
/* Expand function specified by NODE: generate RTL and assembly for its
   body, then release the GIMPLE body and call edges, which are dead
   after output.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->symbol.decl;

  /* We ought to not compile any inline clones; their bodies are only
     ever output as part of the functions they were inlined into.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  /* Expansion requires the body to have been lowered to GIMPLE.  */
  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (node);

  /* Make sure that the back end did not give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  /* The body is released below, so nothing may still need it (e.g.
     unmaterialized clones).  */
  gcc_assert (!cgraph_preserve_function_body_p (node));

  /* It would make a lot more sense to output thunks before the function
     body to get more forward and fewer backward jumps.  This however would
     need solving a problem with comdats.  See PR48668.  Also aliases must
     come after the function itself to make one-pass assemblers, like the
     one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before the function IFF they
     are not in comdat groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
}
1992
1993 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1994
1995 bool
1996 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1997 {
1998 *reason = e->inline_failed;
1999 return !e->inline_failed;
2000 }
2001
2002
2003
2004 /* Expand all functions that must be output.
2005
2006 Attempt to topologically sort the nodes so function is output when
2007 all called functions are already assembled to allow data to be
2008 propagated across the callgraph. Use a stack to get smaller distance
2009 between a function and its callees (later we may choose to use a more
2010 sophisticated algorithm for function reordering; we will likely want
2011 to use subsections to make the output functions appear in top-down
2012 order). */
2013
2014 static void
2015 cgraph_expand_all_functions (void)
2016 {
2017 struct cgraph_node *node;
2018 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
2019 int order_pos, new_order_pos = 0;
2020 int i;
2021
2022 order_pos = ipa_reverse_postorder (order);
2023 gcc_assert (order_pos == cgraph_n_nodes);
2024
2025 /* Garbage collector may remove inline clones we eliminate during
2026 optimization. So we must be sure to not reference them. */
2027 for (i = 0; i < order_pos; i++)
2028 if (order[i]->process)
2029 order[new_order_pos++] = order[i];
2030
2031 for (i = new_order_pos - 1; i >= 0; i--)
2032 {
2033 node = order[i];
2034 if (node->process)
2035 {
2036 gcc_assert (node->reachable);
2037 node->process = 0;
2038 cgraph_expand_function (node);
2039 }
2040 }
2041 cgraph_process_new_functions ();
2042
2043 free (order);
2044
2045 }
2046
2047 /* This is used to sort the node types by the cgraph order number. */
2048
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not claimed by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a function (cgraph_node).  */
  ORDER_VAR,		/* Slot holds a variable (varpool_node).  */
  ORDER_ASM		/* Slot holds a toplevel asm statement.  */
};
2056
struct cgraph_order_sort
{
  /* Discriminator: selects which member of U below is valid.  */
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
2067
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Every symbol has a unique order number below symtab_order, so a
     flat table indexed by order collects everything without clashes.  */
  max = symtab_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Record functions to be output directly; thunks and aliases are
     emitted as part of their underlying function instead.  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  i = pf->symbol.order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Record all defined variables.  */
  FOR_EACH_DEFINED_VARIABLE (pv)
    {
      i = pv->symbol.order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* Record toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In no-toplevel-reorder mode we output all statics; mark them as
     needed before assembly starts.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
	{
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Section flags must be finalized for every variable before any of
     them is assembled.  */
  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_finalize_named_section_flags (nodes[i].u.v);

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
2158
2159 /* Return true when function body of DECL still needs to be kept around
2160 for later re-use. */
2161 bool
2162 cgraph_preserve_function_body_p (struct cgraph_node *node)
2163 {
2164 gcc_assert (cgraph_global_info_ready);
2165 gcc_assert (!node->alias && !node->thunk.thunk_p);
2166
2167 /* Look if there is any clone around. */
2168 if (node->clones)
2169 return true;
2170 return false;
2171 }
2172
/* Run the interprocedural (IPA) pass pipeline: early small-IPA passes,
   summary generation, LTO streaming, and the regular IPA passes,
   depending on whether we are compiling from source or from LTO
   input.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the whole program; clear any per-function
     context left over from earlier work.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Early (small) IPA passes run only when compiling from source; in
     LTO mode their effects were already streamed in.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      /* NOTE(review): this early return skips the matching
	 bitmap_obstack_release below; presumably harmless since
	 compilation is aborting, but worth confirming.  */
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save works at IPA level and to get LTO
     symbol tables right.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      /* Compute the per-function summaries the regular IPA passes
	 will consume (possibly at link time).  */
      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  /* When compiling from source, stream summaries out for link-time
     optimization.  */
  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes now unless we are only producing an LTO
     stream (slim object) or we are in an ltrans stage.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2233
2234
2235 /* Return string alias is alias of. */
2236
2237 static tree
2238 get_alias_symbol (tree decl)
2239 {
2240 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2241 return get_identifier (TREE_STRING_POINTER
2242 (TREE_VALUE (TREE_VALUE (alias))));
2243 }
2244
2245
2246 /* Weakrefs may be associated to external decls and thus not output
2247 at expansion time. Emit all neccesary aliases. */
2248
2249 static void
2250 output_weakrefs (void)
2251 {
2252 struct cgraph_node *node;
2253 struct varpool_node *vnode;
2254 FOR_EACH_FUNCTION (node)
2255 if (node->alias && DECL_EXTERNAL (node->symbol.decl)
2256 && !TREE_ASM_WRITTEN (node->symbol.decl)
2257 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
2258 assemble_alias (node->symbol.decl,
2259 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
2260 : get_alias_symbol (node->symbol.decl));
2261 FOR_EACH_VARIABLE (vnode)
2262 if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
2263 && !TREE_ASM_WRITTEN (vnode->symbol.decl)
2264 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
2265 assemble_alias (vnode->symbol.decl,
2266 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
2267 : get_alias_symbol (vnode->symbol.decl));
2268 }
2269
2270
2271
2272 void
2273 init_cgraph (void)
2274 {
2275 if (!cgraph_dump_file)
2276 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2277 }
2278
2279 /* The edges representing the callers of the NEW_VERSION node were
2280 fixed by cgraph_function_versioning (), now the call_expr in their
2281 respective tree code should be updated to call the NEW_VERSION. */
2282
2283 static void
2284 update_call_expr (struct cgraph_node *new_version)
2285 {
2286 struct cgraph_edge *e;
2287
2288 gcc_assert (new_version);
2289
2290 /* Update the call expr on the edges to call the new version. */
2291 for (e = new_version->callers; e; e = e->next_caller)
2292 {
2293 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->symbol.decl);
2294 gimple_call_set_fndecl (e->call_stmt, new_version->symbol.decl);
2295 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2296 }
2297 }
2298
2299
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL BLOCK_TO_COPY determine what basic blocks
   was copied to prevent duplications of calls that are dead
   in the clone.  */

struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_create_node (new_decl);

  /* Copy the relevant state of the old node.  The new version is a
     local specialization, never externally visible.  */
  new_version->analyzed = old_version->analyzed;
  new_version->local = old_version->local;
  new_version->symbol.externally_visible = false;
  /* NOTE(review): local.local is set from ANALYZED rather than copied
     from the old node -- presumably any analyzed copy is local by
     construction; confirm before relying on this.  */
  new_version->local.local = old_version->analyzed;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone outgoing call edges, skipping calls whose basic block is not
     copied into the new body (they would be dead in the clone).  */
  for (e = old_version->callees; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  /* Likewise for indirect call edges.  */
  for (e = old_version->indirect_calls; e; e=e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      cgraph_clone_edge (e, new_version, e->call_stmt,
			 e->lto_stmt_uid, REG_BR_PROB_BASE,
			 CGRAPH_FREQ_BASE,
			 true);
  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  cgraph_call_node_duplication_hooks (old_version, new_version);

  return new_version;
}
2359
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.

   Return the new version's cgraph node, or NULL when the function
   cannot be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bool skip_return,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->symbol.decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  /* Some functions (e.g. with computed gotos or non-local labels)
     cannot have their bodies duplicated.  */
  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Dropping parameters is only valid when the signature may change.  */
  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't;
     the clone is called explicitly, not at program start/end.  */
  DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
  DECL_STATIC_DESTRUCTOR(new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make sure
     that it is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->symbol.decl);
  DECL_VIRTUAL_P (new_version_node->symbol.decl) = 0;
  new_version_node->symbol.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
2442
/* Given virtual clone NODE, turn it into an actual clone: copy its
   origin's body (applying the recorded tree map and args-to-skip),
   then unlink NODE from its origin's clone list.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Remember which decl this body originally came from, following
     through an already-materialized origin if there is one.  */
  node->former_clone_of = node->clone_of->symbol.decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->symbol.decl, node->symbol.decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, false,
			    NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->symbol.decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->symbol.decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer a clone; unlink it from the doubly-linked
     sibling-clone list of its origin.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin is itself unneeded (not analyzed, no clones left),
     its body and edges can be released now.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->symbol.ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2480
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Return
   the (possibly replacement) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or when the statement already
     calls the edge's callee.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->symbol.decl)
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  /* The current fndecl must not itself be an args-skipping clone;
     otherwise the argument lists below would be inconsistent.  */
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      /* The callee takes fewer arguments; build a replacement call
	 statement without the skipped arguments.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);

      /* The copy must become the defining statement of its vdef.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* Same argument list: just retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);
      update_stmt (new_stmt);
    }

  /* Propagate the statement replacement to all clones of the caller.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2562
2563 /* Once all functions from compilation unit are in memory, produce all clones
2564 and update all calls. We might also do this on demand if we don't want to
2565 bring all functions to memory prior compilation, but current WHOPR
2566 implementation does that and it is is bit easier to keep everything right in
2567 this order. */
2568 static void
2569 cgraph_materialize_all_clones (void)
2570 {
2571 struct cgraph_node *node;
2572 bool stabilized = false;
2573
2574 if (cgraph_dump_file)
2575 fprintf (cgraph_dump_file, "Materializing clones\n");
2576 #ifdef ENABLE_CHECKING
2577 verify_cgraph ();
2578 #endif
2579
2580 /* We can also do topological order, but number of iterations should be
2581 bounded by number of IPA passes since single IPA pass is probably not
2582 going to create clones of clones it created itself. */
2583 while (!stabilized)
2584 {
2585 stabilized = true;
2586 FOR_EACH_FUNCTION (node)
2587 {
2588 if (node->clone_of && node->symbol.decl != node->clone_of->symbol.decl
2589 && !gimple_has_body_p (node->symbol.decl))
2590 {
2591 if (gimple_has_body_p (node->clone_of->symbol.decl))
2592 {
2593 if (cgraph_dump_file)
2594 {
2595 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2596 cgraph_node_name (node->clone_of),
2597 cgraph_node_name (node));
2598 if (node->clone.tree_map)
2599 {
2600 unsigned int i;
2601 fprintf (cgraph_dump_file, " replace map: ");
2602 for (i = 0; i < VEC_length (ipa_replace_map_p,
2603 node->clone.tree_map);
2604 i++)
2605 {
2606 struct ipa_replace_map *replace_info;
2607 replace_info = VEC_index (ipa_replace_map_p,
2608 node->clone.tree_map,
2609 i);
2610 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2611 fprintf (cgraph_dump_file, " -> ");
2612 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2613 fprintf (cgraph_dump_file, "%s%s;",
2614 replace_info->replace_p ? "(replace)":"",
2615 replace_info->ref_p ? "(ref)":"");
2616 }
2617 fprintf (cgraph_dump_file, "\n");
2618 }
2619 if (node->clone.args_to_skip)
2620 {
2621 fprintf (cgraph_dump_file, " args_to_skip: ");
2622 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2623 }
2624 if (node->clone.args_to_skip)
2625 {
2626 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2627 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2628 }
2629 }
2630 cgraph_materialize_clone (node);
2631 stabilized = false;
2632 }
2633 }
2634 }
2635 }
2636 FOR_EACH_FUNCTION (node)
2637 if (!node->analyzed && node->callees)
2638 cgraph_node_remove_callees (node);
2639 if (cgraph_dump_file)
2640 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2641 #ifdef ENABLE_CHECKING
2642 verify_cgraph ();
2643 #endif
2644 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2645 }
2646
2647
/* Perform simple optimizations based on callgraph.  This is the main
   driver: it runs the IPA pipeline, materializes clones, late IPA
   passes, and finally expands and assembles all output functions and
   variables.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Turn virtual clones into real bodies before late IPA passes and
     expansion look at them.  */
  cgraph_materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_late_ipa_passes);
  cgraph_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);
  cgraph_mark_functions_to_output ();
  output_weakrefs ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* With -fno-toplevel-reorder, emit everything in original source
     order; otherwise use the callgraph-driven order.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }

  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->symbol.decl))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2763
2764
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by front ends after parsing; drives analysis and
   then the whole optimization/output pipeline via cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  Run a second
     time because analysis may have discovered new aliases.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
2816
2817
2818 #include "gt-cgraphunit.h"