/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as
   well as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree based frontends and the backend (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole
      body of a function and it is certain that neither the function body
      nor the declaration will change.

      (There is one exception needed for implementing the GCC extern
      inline function extension.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for
      static variables.

    - cgraph_finalize_compilation_unit

      This function is called once the (source level) compilation unit
      is finalized and will no longer change.

      The call-graph construction and local function analysis take
      place here.  Bodies of unreachable functions are released to
      conserve memory usage.

      The function can be called multiple times when multiple source
      level compilation units are combined (such as in the C frontend).

    - cgraph_optimize

      In this unit-at-a-time compilation the intraprocedural analysis
      takes place here.  In particular, static functions whose address
      is never taken are marked as local.  The backend can then use this
      information to modify calling conventions, do better inlining or
      similar optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by these
      functions.  There should be little need to call them directly and
      all the references should be made explicit to the cgraph code.  At
      present these functions are used by the C++ frontend to explicitly
      mark the keyed methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood
      by generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by the expression as needed.

      ??? On the tree-ssa side, genericizing should take place here and we
      would avoid the need for these hooks (replacing them by a
      genericizing hook).

   Analysis of all functions is deferred to
   cgraph_finalize_compilation_unit and expansion to cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are
   analyzed.  During analysis the call-graph edges from reachable
   functions are constructed and their destinations are marked as
   reachable.  References to functions and variables are discovered too,
   and variables found to be needed are output to the assembly file.  Via
   the mark_referenced call in assemble_variable, functions referenced by
   static variables are noticed too.

   The intra-procedural information is produced and its existence is
   indicated by global_info_ready.  Once this flag is set it is impossible
   to change a function from !reachable to reachable and thus
   assemble_variable no longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable
   functions that have not been completely inlined or are not external
   are output.

   ??? It is possible that a reference to a function or variable is
   optimized out.  We cannot deal with this nicely because topological
   order is not suitable for it.  For tree-ssa we may consider another
   pass doing optimization and re-discovering reachable functions.

   ??? Reorganize code so variables are output very last and only if they
   really have been referenced by produced code, so we catch more cases
   where a reference has been optimized out.  */
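
/* As an illustration only (a hedged sketch, not code from any actual
   frontend; parse_next_function_decl is a hypothetical parser entry
   point), a frontend typically drives this interface as follows:

     tree decl;
     while ((decl = parse_next_function_decl ()) != NULL_TREE)
       cgraph_finalize_function (decl, false);
     cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit then analyzes all reachable functions
   and finally invokes cgraph_optimize to drive the pass manager.  */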

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"
#include "plugin.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "lto-streamer.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);

FILE *cgraph_dump_file;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or to something magic in the
   system configury.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && (!node->thunk.thunk_p && !node->same_body_alias)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions
     except for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize
	    && !DECL_DISREGARD_INLINE_LIMITS (decl)
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !(DECL_CONTEXT (decl)
		 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}
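
/* For example (a sketch): given

     int exported (void) { return 0; }
     static int internal (void) { return 1; }

   exported is TREE_PUBLIC and neither COMDAT nor DECL_EXTERNAL, so the
   last test above makes it needed; internal is needed here only via the
   !optimize rule, or later if something else references it.  */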

/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions to the callgraph in a way that makes them look like
   ordinary reachable functions inserted into the callgraph already at
   construction time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize the function
	     and move it into the reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all essential
	     transformations that have already been performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	       && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes
		 anyway to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  else
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_call_function_insertion_hooks (node);
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      varpool_analyze_pending_decls ();
    }
  return output;
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two bodies differ are not well defined.
   We replace the old body with the new body so that in unit-at-a-time
   mode we always use the new body, while in normal mode we may end up
   with the old body inlined into some functions and the new body
   expanded and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */
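
/* For example (a GNU C sketch):

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   Calls inlined before the redefinition is seen may use the first body,
   while the second body is the one the function is expanded from.  */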

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should never
     present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);
}

static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;

  if (node->nested)
    lower_nested_functions (node->decl);
  gcc_assert (!node->nested);

  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  if (node->local.finalized)
    {
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by a vtable from
	 another compilation unit.  Still we want to devirtualize calls
	 to them, so we need to analyze them.
	 FIXME: We should introduce may edges for this purpose and update
	 their handling in unreachable function removal and the inliner
	 too.  */
      || (DECL_VIRTUAL_P (decl)
	  && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}

/* The C99 extern inline keyword allows changing a declaration after the
   function has been finalized.  We then need to re-decide whether we want
   to mark the function as needed.  */

void
cgraph_mark_if_needed (tree decl)
{
  struct cgraph_node *node = cgraph_get_node (decl);
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
}

/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  node = cgraph_function_or_thunk_node (node, NULL);
  node2 = cgraph_function_or_thunk_node (node2, NULL);
  while (node != node2 && node2)
    node2 = node2->clone_of;
  return node2 != NULL;
}

/* Verify edge E count and frequency.  */

static bool
verify_edge_count_and_frequency (struct cgraph_edge *e)
{
  bool error_found = false;
  if (e->count < 0)
    {
      error ("caller edge count is negative");
      error_found = true;
    }
  if (e->frequency < 0)
    {
      error ("caller edge frequency is negative");
      error_found = true;
    }
  if (e->frequency > CGRAPH_FREQ_MAX)
    {
      error ("caller edge frequency is too large");
      error_found = true;
    }
  if (gimple_has_body_p (e->caller->decl)
      && !e->caller->global.inlined_to
      /* FIXME: Inline-analysis sets frequency to 0 when an edge is
	 optimized out.  Remove this once edges are actually removed from
	 the function at that time.  */
      && (e->frequency
	  || (inline_edge_summary_vec
	      && ((VEC_length (inline_edge_summary_t, inline_edge_summary_vec)
		   <= (unsigned) e->uid)
		  || !inline_edge_summary (e)->predicate)))
      && (e->frequency
	  != compute_call_stmt_bb_frequency (e->caller->decl,
					     gimple_bb (e->call_stmt))))
    {
      error ("caller edge frequency %i does not match BB frequency %i",
	     e->frequency,
	     compute_call_stmt_bb_frequency (e->caller->decl,
					     gimple_bb (e->call_stmt)));
      error_found = true;
    }
  return error_found;
}

/* Switch to THIS_CFUN if needed and print STMT to stderr.  */
static void
cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
{
  /* debug_gimple_stmt needs a correct cfun.  */
  if (cfun != this_cfun)
    set_cfun (this_cfun);
  debug_gimple_stmt (stmt);
}

/* Verify that call graph edge E corresponds to DECL from the associated
   statement.  Return true if the verification should fail.  */

static bool
verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
{
  struct cgraph_node *node;

  if (!decl || e->callee->global.inlined_to)
    return false;
  node = cgraph_get_node (decl);

  /* We do not know if a node from a different partition is an alias or what
     it aliases and therefore cannot do the former_clone_of check
     reliably.  */
  if (!node || node->in_other_partition)
    return false;
  node = cgraph_function_or_thunk_node (node, NULL);

  if ((e->callee->former_clone_of != node->decl)
      /* IPA-CP sometimes redirects an edge to a clone and then back to the
	 former function.  This ping-pong has to go, eventually.  */
      && (node != cgraph_function_or_thunk_node (e->callee, NULL))
      && !clone_of_p (node, e->callee))
    return true;
  else
    return false;
}

/* Verify the internal consistency of cgraph node NODE.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       identifier_to_locale (cgraph_node_name (e->caller)),
	       identifier_to_locale (cgraph_node_name (e->callee)));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("execution count is negative");
      error_found = true;
    }
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
	{
	  error ("aux field set for indirect edge from %s",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  error_found = true;
	}
      if (!e->indirect_unknown_callee
	  || !e->indirect_info)
	{
	  error ("an indirect edge from %s is not marked as indirect or has "
		 "no associated indirect_info; the corresponding statement "
		 "is:",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
	  error_found = true;
	}
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
	error_found = true;
      if (!e->inline_failed)
	{
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_get_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
	if (n == node)
	  break;
      if (!n)
	{
	  error ("node has wrong clone_of");
	  error_found = true;
	}
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
	if (n->clone_of != node)
	  break;
      if (n)
	{
	  error ("node has wrong clone list");
	  error_found = true;
	}
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone)
      && !node->clone_of)
    {
      error ("node is in clone list but it is not a clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of
      && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone
      && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
	{
	  error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
	  error_found = true;
	}
      if (n == node)
	{
	  error ("node is alone in a comdat group");
	  error_found = true;
	}
      do
	{
	  if (!n->same_comdat_group)
	    {
	      error ("same_comdat_group is not a circular list");
	      error_found = true;
	      break;
	    }
	  n = n->same_comdat_group;
	}
      while (n != node);
    }

  if (node->analyzed && node->alias)
    {
      bool ref_found = false;
      int i;
      struct ipa_ref *ref;

      if (node->callees)
	{
	  error ("alias has call edges");
	  error_found = true;
	}
      for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref);
	   i++)
	if (ref->use != IPA_REF_ALIAS)
	  {
	    error ("alias has non-alias reference");
	    error_found = true;
	  }
	else if (ref_found)
	  {
	    error ("alias has more than one alias reference");
	    error_found = true;
	  }
	else
	  ref_found = true;
      if (!ref_found)
	{
	  error ("analyzed alias has no reference");
	  error_found = true;
	}
    }
  if (node->analyzed && node->thunk.thunk_p)
    {
      if (!node->callees)
	{
	  error ("no edge out of thunk node");
	  error_found = true;
	}
      else if (node->callees->next_callee)
	{
	  error ("more than one edge out of thunk node");
	  error_found = true;
	}
      if (gimple_has_body_p (node->decl))
	{
	  error ("thunk is not supposed to have a body");
	  error_found = true;
	}
    }
  else if (node->analyzed && gimple_has_body_p (node->decl)
	   && !TREE_ASM_WRITTEN (node->decl)
	   && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
	   && !flag_wpa)
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
		 !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		if (is_gimple_call (stmt))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    tree decl = gimple_call_fndecl (stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    cgraph_debug_gimple_stmt (this_cfun, stmt);
			    error_found = true;
			  }
			if (!e->indirect_unknown_callee)
			  {
			    if (verify_edge_corresponds_to_fndecl (e, decl))
			      {
				error ("edge points to wrong declaration:");
				debug_tree (e->callee->decl);
				fprintf (stderr, " Instead of:");
				debug_tree (decl);
				error_found = true;
			      }
			  }
			else if (decl)
			  {
			    error ("an indirect edge with unknown callee "
				   "corresponding to a call_stmt with "
				   "a known declaration:");
			    error_found = true;
			    cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
			  }
			e->aux = (void *)1;
		      }
		    else if (decl)
		      {
			error ("missing callgraph edge for call stmt:");
			cgraph_debug_gimple_stmt (this_cfun, stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)),
		     identifier_to_locale (cgraph_node_name (e->callee)));
	      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
      for (e = node->indirect_calls; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("an indirect edge from %s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)));
	      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify the whole cgraph structure.  */
DEBUG_FUNCTION void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (seen_error ())
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (seen_error ())
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  if (node->alias && node->thunk.alias)
    {
      struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
      if (!VEC_length (ipa_ref_t, node->ref_list.references))
	ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      if (node->same_body_alias)
	{
	  DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (node->thunk.alias);
	  DECL_DECLARED_INLINE_P (node->decl)
	    = DECL_DECLARED_INLINE_P (node->thunk.alias);
	  DECL_DISREGARD_INLINE_LIMITS (node->decl)
	    = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
	}

      /* Fix up the visibility nonsense the C++ frontend produces on same
	 body aliases.  */
      if (TREE_PUBLIC (node->decl) && node->same_body_alias)
	{
	  DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (node->thunk.alias);
	  if (DECL_ONE_ONLY (node->thunk.alias))
	    {
	      DECL_COMDAT (node->decl) = DECL_COMDAT (node->thunk.alias);
	      DECL_COMDAT_GROUP (node->decl)
		= DECL_COMDAT_GROUP (node->thunk.alias);
	      if (DECL_ONE_ONLY (node->thunk.alias)
		  && !node->same_comdat_group)
		{
		  struct cgraph_node *tgt
		    = cgraph_get_node (node->thunk.alias);
		  node->same_comdat_group = tgt;
		  if (!tgt->same_comdat_group)
		    tgt->same_comdat_group = node;
		  else
		    {
		      struct cgraph_node *n;
		      for (n = tgt->same_comdat_group;
			   n->same_comdat_group != tgt;
			   n = n->same_comdat_group)
			;
		      n->same_comdat_group = node;
		    }
		}
	    }
	}
      cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
      if (node->address_taken)
	cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
      if (cgraph_decide_is_function_needed (node, node->decl))
	cgraph_mark_needed_node (node);
    }
  else if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
    }
  else
    {
      current_function_decl = decl;
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analysis of a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_body (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      cgraph_lower_function (node);
      pop_cfun ();
    }
  node->analyzed = true;

  current_function_decl = save;
}

/* The C++ frontend produces same body aliases all over the place, even
   before PCH gets streamed out.  It relies on us linking the aliases with
   their functions in order to do the fixups, but ipa-ref is not PCH safe.
   Consequently we first produce aliases without links, but once the C++
   FE is sure it won't stream PCH we build the links via this function.  */

void
cgraph_process_same_body_aliases (void)
{
  struct cgraph_node *node;
  for (node = cgraph_nodes; node; node = node->next)
    if (node->same_body_alias
	&& !VEC_length (ipa_ref_t, node->ref_list.references))
      {
	struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
	ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
      }
  same_body_aliases_done = true;
}

/* Process attributes common to vars and functions.  */

static void
process_common_attributes (tree decl)
{
  tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));

  if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
    {
      warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		  "%<weakref%> attribute should be accompanied with"
		  " an %<alias%> attribute");
      DECL_WEAK (decl) = 0;
      DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						 DECL_ATTRIBUTES (decl));
    }
}
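
/* For instance (a sketch), a declaration like

     static int dummy (void) __attribute__ ((weakref));

   carries a weakref attribute with no alias target and is diagnosed
   above, whereas

     static int dummy (void) __attribute__ ((weakref ("bar")));

   implicitly records the alias and passes the check.  */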

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at
   that point may not be canonical.  For example, in:

     void f();
     void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function
   because:

     void f() {}
     void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	cgraph_mark_needed_node (node);
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
	  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
	  && TREE_PUBLIC (node->decl))
	{
	  if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      else if (lookup_attribute ("externally_visible",
				 DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute has effect only on public objects");
	  else if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->local.finalized && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* Redefining an extern inline function makes it
	     DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (DECL_PRESERVE_P (decl))
	{
	  vnode->force_output = true;
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
	  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
	  && TREE_PUBLIC (vnode->decl))
	{
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      else if (lookup_attribute ("externally_visible",
				 DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute has effect only on public objects");
	  else if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->finalized
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Just the new part of the queue is
   then re-scanned.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points on the way (by template instantiation
     and virtual method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later
	 use the weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl)
	  && (!node->alias || !node->thunk.alias)
	  && !node->thunk.thunk_p)
	{
	  cgraph_reset_node (node);
	  node->local.redefined_extern_inline = true;
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);
      for (edge = node->callers; edge; edge = edge->next_caller)
	if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
	  cgraph_mark_reachable_node (edge->caller);

      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body.  The DECL_INITIAL() of
	 that abstract function declaration will be later needed to output
	 debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node;
	  origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl)
	  && (!node->alias || !node->thunk.alias)
	  && !node->thunk.thunk_p)
	cgraph_reset_node (node);

      if (!node->reachable
	  && (gimple_has_body_p (decl) || node->thunk.thunk_p
	      || (node->alias && node->thunk.alias)))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || node->thunk.thunk_p
		  || node->alias
		  || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}

/* Translate the ugly representation of aliases as alias pairs into the
   nice representation in the callgraph.  We don't handle all cases yet,
   unfortunately.  */
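
/* For example (a sketch), the input

     int target_fn (void) { return 0; }
     int alias_fn (void) __attribute__ ((alias ("target_fn")));

   reaches this point as the alias pair (alias_fn, "target_fn") and is
   turned into a callgraph alias via cgraph_create_function_alias
   below.  */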

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;
  struct cgraph_node *target_node;
  struct cgraph_node *src_node;
  struct varpool_node *target_vnode;

  for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
    {
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && (target_node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  src_node = cgraph_get_node (p->decl);
	  if (src_node && src_node->local.finalized)
	    cgraph_reset_node (src_node);
	  /* Normally the EXTERNAL flag is used to mark external inlines;
	     however for aliases it seems to be allowed to use it without
	     any meaning.  See gcc.dg/attr-alias-3.c.
	     For weakrefs, however, we insist on the EXTERNAL flag being
	     set.  See gcc.dg/attr-alias-5.c.  */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  cgraph_create_function_alias (p->decl, target_node->decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  /* Normally the EXTERNAL flag is used to mark external inlines;
	     however for aliases it seems to be allowed to use it without
	     any meaning.  See gcc.dg/attr-alias-3.c.
	     For weakrefs, however, we insist on the EXTERNAL flag being
	     set.  See gcc.dg/attr-alias-5.c.  */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  varpool_create_variable_alias (p->decl, target_vnode->decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Weakrefs with a target not defined in the current unit are easy to
	 handle; they behave just like external variables except we need to
	 note the alias flag to later output the weakref pseudo-op into the
	 asm file.  */
      else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
	       && (TREE_CODE (p->decl) == FUNCTION_DECL
		   ? (varpool_node_for_asm (p->target) == NULL)
		   : (cgraph_node_for_asm (p->target) == NULL)))
	{
	  if (TREE_CODE (p->decl) == FUNCTION_DECL)
	    cgraph_get_create_node (p->decl)->alias = true;
	  else
	    varpool_get_node (p->decl)->alias = true;
	  DECL_EXTERNAL (p->decl) = 1;
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "Unhandled alias %s->%s\n",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		     IDENTIFIER_POINTER (p->target));

	  i++;
	}
    }
}


/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors; we would
     miss diagnostics for the following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}


/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || ((e || ipa_ref_has_aliases_p (&node->ref_list))
		  && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}

/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a
   function in lowered GIMPLE form.

   Set current_function_decl and cfun to the newly constructed empty
   function body.  Return the basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa | PROP_gimple_any);

  /* Create a BB for the body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}

/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable offset
   indicated by VIRTUAL_OFFSET, if that is non-null.  THIS_ADJUSTING is
   nonzero for a this adjusting thunk and zero for a result adjusting
   thunk.  */
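
/* In effect (a sketch in C-like pseudo code; the real work is done
   statement by statement in GIMPLE below), a this-adjusting thunk
   computes

     ptr += fixed_offset;
     if (virtual_offset)
       ptr += *(ptrdiff_t *) (*(char **) ptr + virtual_offset);

   i.e. the vtable pointer is loaded from offset zero in the object and
   virtual_offset indexes the vcall offset slot in the vtable.  */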
1472
1473 static tree
1474 thunk_adjust (gimple_stmt_iterator * bsi,
1475 tree ptr, bool this_adjusting,
1476 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1477 {
1478 gimple stmt;
1479 tree ret;
1480
1481 if (this_adjusting
1482 && fixed_offset != 0)
1483 {
1484 stmt = gimple_build_assign
1485 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1486 ptr,
1487 fixed_offset));
1488 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1489 }
1490
1491 /* If there's a virtual offset, look up that value in the vtable and
1492 adjust the pointer again. */
1493 if (virtual_offset)
1494 {
1495 tree vtabletmp;
1496 tree vtabletmp2;
1497 tree vtabletmp3;
1498
1499 if (!vtable_entry_type)
1500 {
1501 tree vfunc_type = make_node (FUNCTION_TYPE);
1502 TREE_TYPE (vfunc_type) = integer_type_node;
1503 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1504 layout_type (vfunc_type);
1505
1506 vtable_entry_type = build_pointer_type (vfunc_type);
1507 }
1508
1509 vtabletmp =
1510 create_tmp_var (build_pointer_type
1511 (build_pointer_type (vtable_entry_type)), "vptr");
1512
1513 /* The vptr is always at offset zero in the object. */
1514 stmt = gimple_build_assign (vtabletmp,
1515 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1516 ptr));
1517 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1518 mark_symbols_for_renaming (stmt);
1519 find_referenced_vars_in (stmt);
1520
1521 /* Form the vtable address. */
1522 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1523 "vtableaddr");
1524 stmt = gimple_build_assign (vtabletmp2,
1525 build_simple_mem_ref (vtabletmp));
1526 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1527 mark_symbols_for_renaming (stmt);
1528 find_referenced_vars_in (stmt);
1529
1530 /* Find the entry with the vcall offset. */
1531 stmt = gimple_build_assign (vtabletmp2,
1532 fold_build_pointer_plus_loc (input_location,
1533 vtabletmp2,
1534 virtual_offset));
1535 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1536
1537 /* Get the offset itself. */
1538 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1539 "vcalloffset");
1540 stmt = gimple_build_assign (vtabletmp3,
1541 build_simple_mem_ref (vtabletmp2));
1542 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1543 mark_symbols_for_renaming (stmt);
1544 find_referenced_vars_in (stmt);
1545
1546 /* Adjust the `this' pointer. */
1547 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1548 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1549 GSI_CONTINUE_LINKING);
1550 }
1551
1552 if (!this_adjusting
1553 && fixed_offset != 0)
1554 /* Adjust the pointer by the constant. */
1555 {
1556 tree ptrtmp;
1557
1558 if (TREE_CODE (ptr) == VAR_DECL)
1559 ptrtmp = ptr;
1560 else
1561 {
1562 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1563 stmt = gimple_build_assign (ptrtmp, ptr);
1564 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1565 mark_symbols_for_renaming (stmt);
1566 find_referenced_vars_in (stmt);
1567 }
1568 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1569 ptrtmp, fixed_offset);
1570 }
1571
1572 /* Emit the statement and gimplify the adjustment expression. */
1573 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1574 stmt = gimple_build_assign (ret, ptr);
1575 mark_symbols_for_renaming (stmt);
1576 find_referenced_vars_in (stmt);
1577 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1578
1579 return ret;
1580 }
1581
1582 /* Produce assembler for thunk NODE. */
1583
1584 static void
1585 assemble_thunk (struct cgraph_node *node)
1586 {
1587 bool this_adjusting = node->thunk.this_adjusting;
1588 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1589 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1590 tree virtual_offset = NULL;
1591 tree alias = node->thunk.alias;
1592 tree thunk_fndecl = node->decl;
1593 tree a = DECL_ARGUMENTS (thunk_fndecl);
1594
1595 current_function_decl = thunk_fndecl;
1596
1597 /* Ensure thunks are emitted in their correct sections. */
1598 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1599
1600 if (this_adjusting
1601 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1602 virtual_value, alias))
1603 {
1604 const char *fnname;
1605 tree fn_block;
1606 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1607
1608 DECL_RESULT (thunk_fndecl)
1609 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1610 RESULT_DECL, 0, restype);
1611 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1612
1613 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1614 create one. */
1615 fn_block = make_node (BLOCK);
1616 BLOCK_VARS (fn_block) = a;
1617 DECL_INITIAL (thunk_fndecl) = fn_block;
1618 init_function_start (thunk_fndecl);
1619 cfun->is_thunk = 1;
1620 assemble_start_function (thunk_fndecl, fnname);
1621
1622 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1623 fixed_offset, virtual_value, alias);
1624
1625 assemble_end_function (thunk_fndecl, fnname);
1626 init_insn_lengths ();
1627 free_after_compilation (cfun);
1628 set_cfun (NULL);
1629 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1630 node->thunk.thunk_p = false;
1631 node->analyzed = false;
1632 }
1633 else
1634 {
1635 tree restype;
1636 basic_block bb, then_bb, else_bb, return_bb;
1637 gimple_stmt_iterator bsi;
1638 int nargs = 0;
1639 tree arg;
1640 int i;
1641 tree resdecl;
1642 tree restmp = NULL;
1643 VEC(tree, heap) *vargs;
1644
1645 gimple call;
1646 gimple ret;
1647
1648 DECL_IGNORED_P (thunk_fndecl) = 1;
1649 bitmap_obstack_initialize (NULL);
1650
1651 if (node->thunk.virtual_offset_p)
1652 virtual_offset = size_int (virtual_value);
1653
1654 /* Build the return declaration for the function. */
1655 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1656 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1657 {
1658 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1659 DECL_ARTIFICIAL (resdecl) = 1;
1660 DECL_IGNORED_P (resdecl) = 1;
1661 DECL_RESULT (thunk_fndecl) = resdecl;
1662 }
1663 else
1664 resdecl = DECL_RESULT (thunk_fndecl);
1665
1666 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1667
1668 bsi = gsi_start_bb (bb);
1669
1670 /* Build call to the function being thunked. */
1671 if (!VOID_TYPE_P (restype))
1672 {
1673 if (!is_gimple_reg_type (restype))
1674 {
1675 restmp = resdecl;
1676 add_local_decl (cfun, restmp);
1677 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1678 }
1679 else
1680 restmp = create_tmp_var_raw (restype, "retval");
1681 }
1682
1683 for (arg = a; arg; arg = DECL_CHAIN (arg))
1684 nargs++;
1685 vargs = VEC_alloc (tree, heap, nargs);
1686 if (this_adjusting)
1687 VEC_quick_push (tree, vargs,
1688 thunk_adjust (&bsi,
1689 a, 1, fixed_offset,
1690 virtual_offset));
1691 else
1692 VEC_quick_push (tree, vargs, a);
1693 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1694 VEC_quick_push (tree, vargs, arg);
1695 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1696 VEC_free (tree, heap, vargs);
1697 gimple_call_set_cannot_inline (call, true);
1698 gimple_call_set_from_thunk (call, true);
1699 if (restmp)
1700 gimple_call_set_lhs (call, restmp);
1701 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1702 mark_symbols_for_renaming (call);
1703 find_referenced_vars_in (call);
1704 update_stmt (call);
1705
1706 if (restmp && !this_adjusting)
1707 {
1708 tree true_label = NULL_TREE;
1709
1710 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1711 {
1712 gimple stmt;
1713 /* If the return type is a pointer, we need to
1714 protect against NULL. We know there will be an
1715 adjustment, because that's why we're emitting a
1716 thunk. */
1717 then_bb = create_basic_block (NULL, (void *) 0, bb);
1718 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1719 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1720 remove_edge (single_succ_edge (bb));
1721 true_label = gimple_block_label (then_bb);
1722 stmt = gimple_build_cond (NE_EXPR, restmp,
1723 build_zero_cst (TREE_TYPE (restmp)),
1724 NULL_TREE, NULL_TREE);
1725 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1726 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1727 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1728 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1729 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1730 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1731 bsi = gsi_last_bb (then_bb);
1732 }
1733
1734 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1735 fixed_offset, virtual_offset);
1736 if (true_label)
1737 {
1738 gimple stmt;
1739 bsi = gsi_last_bb (else_bb);
1740 stmt = gimple_build_assign (restmp,
1741 build_zero_cst (TREE_TYPE (restmp)));
1742 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1743 bsi = gsi_last_bb (return_bb);
1744 }
1745 }
1746 else
1747 gimple_call_set_tail (call, true);
1748
1749 /* Build return value. */
1750 ret = gimple_build_return (restmp);
1751 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1752
1753 delete_unreachable_blocks ();
1754 update_ssa (TODO_update_ssa);
1755
1756 /* Since we want to emit the thunk, we explicitly mark its name as
1757 referenced. */
1758 node->thunk.thunk_p = false;
1759 cgraph_node_remove_callees (node);
1760 cgraph_add_new_function (thunk_fndecl, true);
1761 bitmap_obstack_release (NULL);
1762 }
1763 current_function_decl = NULL;
1764 }
1765
1766
1767
1768 /* Assemble thunks and aliases associated with NODE. */
1769
1770 static void
1771 assemble_thunks_and_aliases (struct cgraph_node *node)
1772 {
1773 struct cgraph_edge *e;
1774 int i;
1775 struct ipa_ref *ref;
1776
1777 for (e = node->callers; e;)
1778 if (e->caller->thunk.thunk_p)
1779 {
1780 struct cgraph_node *thunk = e->caller;
1781
1782 e = e->next_caller;
1783 assemble_thunks_and_aliases (thunk);
1784 assemble_thunk (thunk);
1785 }
1786 else
1787 e = e->next_caller;
1788 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
1789 if (ref->use == IPA_REF_ALIAS)
1790 {
1791 struct cgraph_node *alias = ipa_ref_refering_node (ref);
1792 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1793
1794 /* Force assemble_alias to really output the alias this time instead
1795 of buffering it in the alias pairs list. */
1796 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
1797 assemble_alias (alias->decl,
1798 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1799 assemble_thunks_and_aliases (alias);
1800 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
1801 }
1802 }
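/* For reference, the IPA_REF_ALIAS case above covers aliases that can
   be written at the source level like this hypothetical example:

       void target_fn (void) { }
       void alias_fn (void) __attribute__ ((alias ("target_fn")));

   The recursion ensures that thunks of thunks and aliases of aliases
   hanging off NODE are emitted as well.  */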
1803
1804 /* Expand function specified by NODE. */
1805
1806 static void
1807 cgraph_expand_function (struct cgraph_node *node)
1808 {
1809 tree decl = node->decl;
1810
1811 /* We ought not to compile any inline clones. */
1812 gcc_assert (!node->global.inlined_to);
1813
1814 announce_function (decl);
1815 node->process = 0;
1816 gcc_assert (node->lowered);
1817
1818 /* Generate RTL for the body of DECL. */
1819 tree_rest_of_compilation (decl);
1820
1821 /* Make sure that the back end did not give up on compiling. */
1822 gcc_assert (TREE_ASM_WRITTEN (decl));
1823 current_function_decl = NULL;
1824 gcc_assert (!cgraph_preserve_function_body_p (node));
1825
1826 /* It would make a lot more sense to output thunks before the function body
1827 to get more forward and fewer backward jumps. However, that would require
1828 solving a problem with comdats; see PR48668. Also, aliases must come after
1829 the function itself to keep one-pass assemblers, like the one on AIX, happy; see PR 50689.
1830 FIXME: Perhaps thunks should be moved before the function IFF they are not
1831 in comdat groups. */
1832 assemble_thunks_and_aliases (node);
1833 cgraph_release_function_body (node);
1834 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1835 points to the dead function body. */
1836 cgraph_node_remove_callees (node);
1837
1838 cgraph_function_flags_ready = true;
1839 }
1840
1841 /* Return true when the call on edge E will be inlined; if not, the reason is stored in *REASON. */
1842
1843 bool
1844 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1845 {
1846 *reason = e->inline_failed;
1847 return !e->inline_failed;
1848 }
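/* A typical use is to query an edge before deciding how to expand the
   call (a sketch; E is a call edge and cgraph_inline_failed_string can
   render the reason for diagnostics):

       cgraph_inline_failed_t reason;
       bool will_inline = cgraph_inline_p (e, &reason);  */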
1849
1850
1851
1852 /* Expand all functions that must be output.
1853
1854 Attempt to topologically sort the nodes so that a function is output
1855 when all the functions it calls are already assembled, allowing data to
1856 be propagated across the callgraph. Use a stack to get smaller distance
1857 between a function and its callees (later we may choose to use a more
1858 sophisticated algorithm for function reordering; we will likely want
1859 to use subsections to make the output functions appear in top-down
1860 order). */
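/* For example, with a callgraph main -> foo -> bar (and no cycles),
   the reverse postorder is main, foo, bar; the back-to-front walk
   below then expands bar first and main last, so each function's
   callees are assembled before the function itself.  */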
1861
1862 static void
1863 cgraph_expand_all_functions (void)
1864 {
1865 struct cgraph_node *node;
1866 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1867 int order_pos, new_order_pos = 0;
1868 int i;
1869
1870 order_pos = ipa_reverse_postorder (order);
1871 gcc_assert (order_pos == cgraph_n_nodes);
1872
1873 /* The garbage collector may remove the inline clones we eliminate during
1874 optimization, so we must be sure not to reference them. */
1875 for (i = 0; i < order_pos; i++)
1876 if (order[i]->process)
1877 order[new_order_pos++] = order[i];
1878
1879 for (i = new_order_pos - 1; i >= 0; i--)
1880 {
1881 node = order[i];
1882 if (node->process)
1883 {
1884 gcc_assert (node->reachable);
1885 node->process = 0;
1886 cgraph_expand_function (node);
1887 }
1888 }
1889 cgraph_process_new_functions ();
1890
1891 free (order);
1892
1893 }
1894
1895 /* This is used to sort the node types by the cgraph order number. */
1896
1897 enum cgraph_order_sort_kind
1898 {
1899 ORDER_UNDEFINED = 0,
1900 ORDER_FUNCTION,
1901 ORDER_VAR,
1902 ORDER_ASM
1903 };
1904
1905 struct cgraph_order_sort
1906 {
1907 enum cgraph_order_sort_kind kind;
1908 union
1909 {
1910 struct cgraph_node *f;
1911 struct varpool_node *v;
1912 struct cgraph_asm_node *a;
1913 } u;
1914 };
1915
1916 /* Output all functions, variables, and asm statements according to
1917 their order fields, which record the order in which they
1918 appeared in the file. This implements -fno-toplevel-reorder. In
1919 this mode we may output functions and variables which don't really
1920 need to be output. */
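/* For instance, for a hypothetical translation unit

       int x = 1;
       asm ("# marker");
       static int f (void) { return x; }

   the order fields force emission of x, then the toplevel asm, then f,
   matching the source, instead of the usual grouping of functions and
   variables.  */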
1921
1922 static void
1923 cgraph_output_in_order (void)
1924 {
1925 int max;
1926 struct cgraph_order_sort *nodes;
1927 int i;
1928 struct cgraph_node *pf;
1929 struct varpool_node *pv;
1930 struct cgraph_asm_node *pa;
1931
1932 max = cgraph_order;
1933 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1934
1935 varpool_analyze_pending_decls ();
1936
1937 for (pf = cgraph_nodes; pf; pf = pf->next)
1938 {
1939 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1940 {
1941 i = pf->order;
1942 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1943 nodes[i].kind = ORDER_FUNCTION;
1944 nodes[i].u.f = pf;
1945 }
1946 }
1947
1948 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1949 {
1950 i = pv->order;
1951 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1952 nodes[i].kind = ORDER_VAR;
1953 nodes[i].u.v = pv;
1954 }
1955
1956 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1957 {
1958 i = pa->order;
1959 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1960 nodes[i].kind = ORDER_ASM;
1961 nodes[i].u.a = pa;
1962 }
1963
1964 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1965 for (i = 0; i < max; ++i)
1966 {
1967 if (nodes[i].kind == ORDER_VAR)
1968 {
1969 varpool_mark_needed_node (nodes[i].u.v);
1970 }
1971 }
1972 varpool_empty_needed_queue ();
1973
1974 for (i = 0; i < max; ++i)
1975 if (nodes[i].kind == ORDER_VAR)
1976 varpool_finalize_named_section_flags (nodes[i].u.v);
1977
1978 for (i = 0; i < max; ++i)
1979 {
1980 switch (nodes[i].kind)
1981 {
1982 case ORDER_FUNCTION:
1983 nodes[i].u.f->process = 0;
1984 cgraph_expand_function (nodes[i].u.f);
1985 break;
1986
1987 case ORDER_VAR:
1988 varpool_assemble_decl (nodes[i].u.v);
1989 break;
1990
1991 case ORDER_ASM:
1992 assemble_asm (nodes[i].u.a->asm_str);
1993 break;
1994
1995 case ORDER_UNDEFINED:
1996 break;
1997
1998 default:
1999 gcc_unreachable ();
2000 }
2001 }
2002
2003 cgraph_asm_nodes = NULL;
2004 free (nodes);
2005 }
2006
2007 /* Return true when the function body of NODE still needs to be kept around
2008 for later re-use. */
2009 bool
2010 cgraph_preserve_function_body_p (struct cgraph_node *node)
2011 {
2012 gcc_assert (cgraph_global_info_ready);
2013 gcc_assert (!node->alias && !node->thunk.thunk_p);
2014
2015 /* See whether there is any clone around. */
2016 if (node->clones)
2017 return true;
2018 return false;
2019 }
2020
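/* Run the IPA pass queues: the small (early) IPA passes, summary
   generation for the regular IPA and LTO-gen passes, streaming of LTO
   summaries when applicable and, unless we are only producing an LTO
   stream, the regular IPA passes themselves.  */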
2021 static void
2022 ipa_passes (void)
2023 {
2024 set_cfun (NULL);
2025 current_function_decl = NULL;
2026 gimple_register_cfg_hooks ();
2027 bitmap_obstack_initialize (NULL);
2028
2029 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2030
2031 if (!in_lto_p)
2032 {
2033 execute_ipa_pass_list (all_small_ipa_passes);
2034 if (seen_error ())
2035 return;
2036 }
2037
2038 /* We never run removal of unreachable nodes after early passes. This is
2039 because TODO is run before the subpasses. It is important to remove
2040 the unreachable functions to save work at the IPA level and to get LTO
2041 symbol tables right. */
2042 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
2043
2044 /* If pass_all_early_optimizations was not scheduled, the state of
2045 the cgraph will not be properly updated. Update it now. */
2046 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
2047 cgraph_state = CGRAPH_STATE_IPA_SSA;
2048
2049 if (!in_lto_p)
2050 {
2051 /* Generate coverage variables and constructors. */
2052 coverage_finish ();
2053
2054 /* Process new functions added. */
2055 set_cfun (NULL);
2056 current_function_decl = NULL;
2057 cgraph_process_new_functions ();
2058
2059 execute_ipa_summary_passes
2060 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
2061 }
2062
2063 /* Some targets need to handle LTO assembler output specially. */
2064 if (flag_generate_lto)
2065 targetm.asm_out.lto_start ();
2066
2067 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
2068
2069 if (!in_lto_p)
2070 ipa_write_summaries ();
2071
2072 if (flag_generate_lto)
2073 targetm.asm_out.lto_end ();
2074
2075 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2076 execute_ipa_pass_list (all_regular_ipa_passes);
2077 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2078
2079 bitmap_obstack_release (NULL);
2080 }
2081
2082
2083 /* Return the identifier of the symbol that DECL's "alias" attribute names. */
2084
2085 static tree
2086 get_alias_symbol (tree decl)
2087 {
2088 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2089 return get_identifier (TREE_STRING_POINTER
2090 (TREE_VALUE (TREE_VALUE (alias))));
2091 }
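/* For a declaration such as this hypothetical example

       void f (void) __attribute__ ((alias ("real_f")));

   the "alias" attribute carries a single STRING_CST argument; the
   nested TREE_VALUE accesses above reach that string, and
   get_identifier interns "real_f" as the returned symbol.  */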
2092
2093
2094 /* Weakrefs may be associated with external decls and thus not output
2095 at expansion time. Emit all necessary aliases. */
2096
2097 static void
2098 output_weakrefs (void)
2099 {
2100 struct cgraph_node *node;
2101 struct varpool_node *vnode;
2102 for (node = cgraph_nodes; node; node = node->next)
2103 if (node->alias && DECL_EXTERNAL (node->decl)
2104 && !TREE_ASM_WRITTEN (node->decl))
2105 assemble_alias (node->decl,
2106 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
2107 : get_alias_symbol (node->decl));
2108 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
2109 if (vnode->alias && DECL_EXTERNAL (vnode->decl)
2110 && !TREE_ASM_WRITTEN (vnode->decl))
2111 assemble_alias (vnode->decl,
2112 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
2113 : get_alias_symbol (vnode->decl));
2114 }
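/* A weakref of the form (hypothetical example)

       static void local_f (void) __attribute__ ((weakref ("ext_f")));

   targets an external symbol and never gets a body of its own, so the
   loop above is what emits its alias directive.  */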
2115
2116
2117 /* Perform simple optimizations based on callgraph. */
2118
2119 void
2120 cgraph_optimize (void)
2121 {
2122 if (seen_error ())
2123 return;
2124
2125 #ifdef ENABLE_CHECKING
2126 verify_cgraph ();
2127 #endif
2128
2129 /* The front end may output common variables after the unit has been finalized.
2130 It is safe to deal with them here as they are always zero-initialized. */
2131 varpool_analyze_pending_decls ();
2132
2133 timevar_push (TV_CGRAPHOPT);
2134 if (pre_ipa_mem_report)
2135 {
2136 fprintf (stderr, "Memory consumption before IPA\n");
2137 dump_memory_report (false);
2138 }
2139 if (!quiet_flag)
2140 fprintf (stderr, "Performing interprocedural optimizations\n");
2141 cgraph_state = CGRAPH_STATE_IPA;
2142
2143 /* Don't run the IPA passes if there were any errors or sorry messages. */
2144 if (!seen_error ())
2145 ipa_passes ();
2146
2147 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2148 if (seen_error ()
2149 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2150 {
2151 timevar_pop (TV_CGRAPHOPT);
2152 return;
2153 }
2154
2155 /* This pass removes bodies of extern inline functions we never inlined.
2156 Do this late so that other IPA passes see what is really going on. */
2157 cgraph_remove_unreachable_nodes (false, dump_file);
2158 cgraph_global_info_ready = true;
2159 if (cgraph_dump_file)
2160 {
2161 fprintf (cgraph_dump_file, "Optimized ");
2162 dump_cgraph (cgraph_dump_file);
2163 dump_varpool (cgraph_dump_file);
2164 }
2165 if (post_ipa_mem_report)
2166 {
2167 fprintf (stderr, "Memory consumption after IPA\n");
2168 dump_memory_report (false);
2169 }
2170 timevar_pop (TV_CGRAPHOPT);
2171
2172 /* Output everything. */
2173 (*debug_hooks->assembly_start) ();
2174 if (!quiet_flag)
2175 fprintf (stderr, "Assembling functions:\n");
2176 #ifdef ENABLE_CHECKING
2177 verify_cgraph ();
2178 #endif
2179
2180 cgraph_materialize_all_clones ();
2181 bitmap_obstack_initialize (NULL);
2182 execute_ipa_pass_list (all_late_ipa_passes);
2183 cgraph_remove_unreachable_nodes (true, dump_file);
2184 #ifdef ENABLE_CHECKING
2185 verify_cgraph ();
2186 #endif
2187 bitmap_obstack_release (NULL);
2188 cgraph_mark_functions_to_output ();
2189
2190 cgraph_state = CGRAPH_STATE_EXPANSION;
2191 if (!flag_toplevel_reorder)
2192 cgraph_output_in_order ();
2193 else
2194 {
2195 cgraph_output_pending_asms ();
2196
2197 cgraph_expand_all_functions ();
2198 varpool_remove_unreferenced_decls ();
2199
2200 varpool_assemble_pending_decls ();
2201 }
2202
2203 output_weakrefs ();
2204 cgraph_process_new_functions ();
2205 cgraph_state = CGRAPH_STATE_FINISHED;
2206
2207 if (cgraph_dump_file)
2208 {
2209 fprintf (cgraph_dump_file, "\nFinal ");
2210 dump_cgraph (cgraph_dump_file);
2211 dump_varpool (cgraph_dump_file);
2212 }
2213 #ifdef ENABLE_CHECKING
2214 verify_cgraph ();
2215 /* Double check that all inline clones are gone and that all
2216 function bodies have been released from memory. */
2217 if (!seen_error ())
2218 {
2219 struct cgraph_node *node;
2220 bool error_found = false;
2221
2222 for (node = cgraph_nodes; node; node = node->next)
2223 if (node->analyzed
2224 && (node->global.inlined_to
2225 || gimple_has_body_p (node->decl)))
2226 {
2227 error_found = true;
2228 dump_cgraph_node (stderr, node);
2229 }
2230 if (error_found)
2231 internal_error ("nodes with unreleased memory found");
2232 }
2233 #endif
2234 }
2235
2236 void
2237 init_cgraph (void)
2238 {
2239 if (!cgraph_dump_file)
2240 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2241 }
2242
2243 /* The edges representing the callers of the NEW_VERSION node were
2244 fixed by cgraph_function_versioning (); now the call statements in
2245 their respective bodies should be updated to call NEW_VERSION. */
2246
2247 static void
2248 update_call_expr (struct cgraph_node *new_version)
2249 {
2250 struct cgraph_edge *e;
2251
2252 gcc_assert (new_version);
2253
2254 /* Update the call expr on the edges to call the new version. */
2255 for (e = new_version->callers; e; e = e->next_caller)
2256 {
2257 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2258 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2259 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2260 }
2261 }
2262
2263
2264 /* Create a new cgraph node which is the new version of the
2265 OLD_VERSION node. REDIRECT_CALLERS holds the caller
2266 edges which should be redirected to point to
2267 NEW_VERSION. All the callee edges of OLD_VERSION
2268 are cloned to the new version node. Return the new
2269 version node.
2270
2271 If non-NULL, BBS_TO_COPY determines which basic blocks
2272 are copied, to prevent duplication of calls that are dead
2273 in the clone. */
2274
2275 struct cgraph_node *
2276 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2277 tree new_decl,
2278 VEC(cgraph_edge_p,heap) *redirect_callers,
2279 bitmap bbs_to_copy)
2280 {
2281 struct cgraph_node *new_version;
2282 struct cgraph_edge *e;
2283 unsigned i;
2284
2285 gcc_assert (old_version);
2286
2287 new_version = cgraph_create_node (new_decl);
2288
2289 new_version->analyzed = old_version->analyzed;
2290 new_version->local = old_version->local;
2291 new_version->local.externally_visible = false;
2292 new_version->local.local = true;
2293 new_version->global = old_version->global;
2294 new_version->rtl = old_version->rtl;
2295 new_version->reachable = true;
2296 new_version->count = old_version->count;
2297
2298 for (e = old_version->callees; e; e=e->next_callee)
2299 if (!bbs_to_copy
2300 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2301 cgraph_clone_edge (e, new_version, e->call_stmt,
2302 e->lto_stmt_uid, REG_BR_PROB_BASE,
2303 CGRAPH_FREQ_BASE,
2304 true);
2305 for (e = old_version->indirect_calls; e; e=e->next_callee)
2306 if (!bbs_to_copy
2307 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2308 cgraph_clone_edge (e, new_version, e->call_stmt,
2309 e->lto_stmt_uid, REG_BR_PROB_BASE,
2310 CGRAPH_FREQ_BASE,
2311 true);
2312 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2313 {
2314 /* Redirect calls to the old version node to point to its new
2315 version. */
2316 cgraph_redirect_edge_callee (e, new_version);
2317 }
2318
2319 return new_version;
2320 }
2321
2322 /* Perform function versioning.
2323 Function versioning includes copying of the tree and
2324 a callgraph update (creating a new cgraph node and updating
2325 its callees and callers).
2326
2327 The REDIRECT_CALLERS vector includes the edges to be redirected
2328 to the new version.
2329
2330 TREE_MAP is a mapping of tree nodes we want to replace with
2331 new ones (according to results of prior analysis).
2332 OLD_VERSION_NODE is the node that is versioned.
2333 It returns the new version's cgraph node.
2334 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2335 from the new version.
2336 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2337 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone. */
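/* A minimal invocation might look like this sketch, where all optional
   arguments are omitted and "my_clone" is an arbitrary suffix:

       new_node = cgraph_function_versioning (node, redirect_callers,
                                              NULL, NULL, NULL, NULL,
                                              "my_clone");

   This yields a local, non-exported copy of NODE with the given caller
   edges redirected to it.  */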
2338
2339 struct cgraph_node *
2340 cgraph_function_versioning (struct cgraph_node *old_version_node,
2341 VEC(cgraph_edge_p,heap) *redirect_callers,
2342 VEC (ipa_replace_map_p,gc)* tree_map,
2343 bitmap args_to_skip,
2344 bitmap bbs_to_copy,
2345 basic_block new_entry_block,
2346 const char *clone_name)
2347 {
2348 tree old_decl = old_version_node->decl;
2349 struct cgraph_node *new_version_node = NULL;
2350 tree new_decl;
2351
2352 if (!tree_versionable_function_p (old_decl))
2353 return NULL;
2354
2355 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2356
2357 /* Make a new FUNCTION_DECL tree node for the
2358 new version. */
2359 if (!args_to_skip)
2360 new_decl = copy_node (old_decl);
2361 else
2362 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2363
2364 /* Generate a new name for the new version. */
2365 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2366 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2367 SET_DECL_RTL (new_decl, NULL);
2368
2369 /* When the old decl was a con-/destructor make sure the clone isn't. */
2370 DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
2371 DECL_STATIC_DESTRUCTOR(new_decl) = 0;
2372
2373 /* Create the new version's call-graph node
2374 and update the edges of the new node. */
2375 new_version_node =
2376 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2377 redirect_callers, bbs_to_copy);
2378
2379 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2380 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2381 bbs_to_copy, new_entry_block);
2382
2383 /* Update the new version's properties.
2384 Make the new version visible only within this translation unit. Make sure
2385 that it is not weak either.
2386 ??? We cannot use COMDAT linkage because there is no
2387 ABI support for this. */
2388 cgraph_make_decl_local (new_version_node->decl);
2389 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2390 new_version_node->local.externally_visible = 0;
2391 new_version_node->local.local = 1;
2392 new_version_node->lowered = true;
2393
2394 /* Update the call_expr on the edges to call the new version node. */
2395 update_call_expr (new_version_node);
2396
2397 cgraph_call_function_insertion_hooks (new_version_node);
2398 return new_version_node;
2399 }
2400
2401 /* Given a virtual clone, turn it into an actual clone. */
2402 static void
2403 cgraph_materialize_clone (struct cgraph_node *node)
2404 {
2405 bitmap_obstack_initialize (NULL);
2406 node->former_clone_of = node->clone_of->decl;
2407 if (node->clone_of->former_clone_of)
2408 node->former_clone_of = node->clone_of->former_clone_of;
2409 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2410 tree_function_versioning (node->clone_of->decl, node->decl,
2411 node->clone.tree_map, true,
2412 node->clone.args_to_skip, NULL, NULL);
2413 if (cgraph_dump_file)
2414 {
2415 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2416 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2417 }
2418
2419 /* The function is no longer a clone. */
2420 if (node->next_sibling_clone)
2421 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2422 if (node->prev_sibling_clone)
2423 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2424 else
2425 node->clone_of->clones = node->next_sibling_clone;
2426 node->next_sibling_clone = NULL;
2427 node->prev_sibling_clone = NULL;
2428 if (!node->clone_of->analyzed && !node->clone_of->clones)
2429 {
2430 cgraph_release_function_body (node->clone_of);
2431 cgraph_node_remove_callees (node->clone_of);
2432 ipa_remove_all_references (&node->clone_of->ref_list);
2433 }
2434 node->clone_of = NULL;
2435 bitmap_obstack_release (NULL);
2436 }
2437
2438 /* If necessary, change the function declaration in the call statement
2439 associated with E so that it corresponds to the edge callee. */
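/* For instance, when E's callee is a clone that dropped its second
   parameter, a statement

       tmp = foo (a, b, c);

   is replaced, roughly, by

       tmp = foo.constprop.0 (a, c);

   via gimple_call_copy_skip_args; the clone name here is purely
   illustrative.  */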
2440
2441 gimple
2442 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2443 {
2444 tree decl = gimple_call_fndecl (e->call_stmt);
2445 gimple new_stmt;
2446 gimple_stmt_iterator gsi;
2447 #ifdef ENABLE_CHECKING
2448 struct cgraph_node *node;
2449 #endif
2450
2451 if (e->indirect_unknown_callee
2452 || decl == e->callee->decl)
2453 return e->call_stmt;
2454
2455 #ifdef ENABLE_CHECKING
2456 if (decl)
2457 {
2458 node = cgraph_get_node (decl);
2459 gcc_assert (!node || !node->clone.combined_args_to_skip);
2460 }
2461 #endif
2462
2463 if (cgraph_dump_file)
2464 {
2465 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2466 cgraph_node_name (e->caller), e->caller->uid,
2467 cgraph_node_name (e->callee), e->callee->uid);
2468 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2469 if (e->callee->clone.combined_args_to_skip)
2470 {
2471 fprintf (cgraph_dump_file, " combined args to skip: ");
2472 dump_bitmap (cgraph_dump_file,
2473 e->callee->clone.combined_args_to_skip);
2474 }
2475 }
2476
2477 if (e->callee->clone.combined_args_to_skip)
2478 {
2479 int lp_nr;
2480
2481 new_stmt
2482 = gimple_call_copy_skip_args (e->call_stmt,
2483 e->callee->clone.combined_args_to_skip);
2484 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2485
2486 if (gimple_vdef (new_stmt)
2487 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2488 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2489
2490 gsi = gsi_for_stmt (e->call_stmt);
2491 gsi_replace (&gsi, new_stmt, false);
2492 /* We need to defer cleaning EH info on the new statement to
2493 fixup-cfg. We may not have dominator information at this point
2494 and thus would end up with unreachable blocks and have no way
2495 to communicate that we need to run CFG cleanup then. */
2496 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2497 if (lp_nr != 0)
2498 {
2499 remove_stmt_from_eh_lp (e->call_stmt);
2500 add_stmt_to_eh_lp (new_stmt, lp_nr);
2501 }
2502 }
2503 else
2504 {
2505 new_stmt = e->call_stmt;
2506 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2507 update_stmt (new_stmt);
2508 }
2509
2510 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2511
2512 if (cgraph_dump_file)
2513 {
2514 fprintf (cgraph_dump_file, " updated to:");
2515 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2516 }
2517 return new_stmt;
2518 }
2519
2520 /* Once all functions from the compilation unit are in memory, produce all
2521 clones and update all calls. We might also do this on demand if we did not
2522 want to bring all functions into memory prior to compilation, but the
2523 current WHOPR implementation does, and it is a bit easier to keep
2524 everything right this way. */
2525 void
2526 cgraph_materialize_all_clones (void)
2527 {
2528 struct cgraph_node *node;
2529 bool stabilized = false;
2530
2531 if (cgraph_dump_file)
2532 fprintf (cgraph_dump_file, "Materializing clones\n");
2533 #ifdef ENABLE_CHECKING
2534 verify_cgraph ();
2535 #endif
2536
2537 /* We could also use topological order, but the number of iterations should
2538 be bounded by the number of IPA passes, since a single IPA pass is probably
2539 not going to create clones of clones it created itself. */
2540 while (!stabilized)
2541 {
2542 stabilized = true;
2543 for (node = cgraph_nodes; node; node = node->next)
2544 {
2545 if (node->clone_of && node->decl != node->clone_of->decl
2546 && !gimple_has_body_p (node->decl))
2547 {
2548 if (gimple_has_body_p (node->clone_of->decl))
2549 {
2550 if (cgraph_dump_file)
2551 {
2552 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2553 cgraph_node_name (node->clone_of),
2554 cgraph_node_name (node));
2555 if (node->clone.tree_map)
2556 {
2557 unsigned int i;
2558 fprintf (cgraph_dump_file, " replace map: ");
2559 for (i = 0; i < VEC_length (ipa_replace_map_p,
2560 node->clone.tree_map);
2561 i++)
2562 {
2563 struct ipa_replace_map *replace_info;
2564 replace_info = VEC_index (ipa_replace_map_p,
2565 node->clone.tree_map,
2566 i);
2567 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2568 fprintf (cgraph_dump_file, " -> ");
2569 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2570 fprintf (cgraph_dump_file, "%s%s;",
2571 replace_info->replace_p ? "(replace)":"",
2572 replace_info->ref_p ? "(ref)":"");
2573 }
2574 fprintf (cgraph_dump_file, "\n");
2575 }
2576 if (node->clone.args_to_skip)
2577 {
2578 fprintf (cgraph_dump_file, " args_to_skip: ");
2579 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2580 }
2581 if (node->clone.combined_args_to_skip)
2582 {
2583 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2584 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2585 }
2586 }
2587 cgraph_materialize_clone (node);
2588 stabilized = false;
2589 }
2590 }
2591 }
2592 }
2593 for (node = cgraph_nodes; node; node = node->next)
2594 if (!node->analyzed && node->callees)
2595 cgraph_node_remove_callees (node);
2596 if (cgraph_dump_file)
2597 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2598 #ifdef ENABLE_CHECKING
2599 verify_cgraph ();
2600 #endif
2601 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2602 }
2603
2604 #include "gt-cgraphunit.h"