cd6bca02 1/* Callgraph based interprocedural optimizations.
aed6e608 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements the main driver of the compilation process as well as
cd6bca02 23 a few basic interprocedural optimizers.
b0cdf642 24
 25 The main purpose of this file is to act as an interface between the
 26 tree-based front ends and the back end (and middle end).
 27
 28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front end has parsed the whole body of the function
 33 and it is certain that neither the function body nor the declaration will change.
 34
b326746d 35 (There is one exception needed for implementing GCC extern inline
 36 functions.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has the same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
851d9296 48 The call-graph construction and local function analysis take
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
b0cdf642 51
b326746d 52 The function can be called multiple times when multiple source level
 53 compilation units are combined (such as in the C front end).
b0cdf642 54
55 - cgraph_optimize
56
 57 In unit-at-a-time compilation the intraprocedural analysis takes
 58 place here. In particular, static functions whose address is never
 59 taken are marked as local. The back end can then use this information to
 60 modify calling conventions, do better inlining, or perform similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When a function or variable is referenced in some hidden way, the call-graph
 66 data structure must be updated accordingly by this function.
 67 There should be little need to call this function and all the references
 68 should be made explicit to the cgraph code. At present these functions are
ccd2f3d1 69 used by the C++ front end to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
 73 This function is responsible for lowering tree nodes not understood by
 74 generic code into understandable ones, or alternatively for marking
 75 callgraph and varpool nodes referenced by them as needed.
 76
 77 ??? On tree-ssa, genericizing should take place here and we would avoid the
 78 need for these hooks (replacing them by a genericizing hook).
 79
6329636b 80 The analysis of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
 83 In cgraph_finalize_compilation_unit the reachable functions are
 84 analyzed. During analysis the call-graph edges from reachable
 85 functions are constructed and their destinations are marked as
 86 reachable. References to functions and variables are discovered too,
 87 and variables found to be needed are output to the assembly file. Via
 88 the mark_referenced call in assemble_variable, functions referenced by
 89 static variables are noticed too.
 90
ca67a72b 91 The intra-procedural information is produced and its existence is
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
 93 to change a function from !reachable to reachable and thus
 94 assemble_variable no longer calls mark_referenced.
95
 96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined and are not external are output.
 98
 99 ??? It is possible that a reference to a function or variable is optimized
 100 out. We cannot deal with this nicely because the topological order is not
 101 suitable for it. For tree-ssa we may consider another pass doing
 102 optimization and re-discovering reachable functions.
 103
 104 ??? Reorganize the code so variables are output very last and only if they
 105 really have been referenced by the produced code, so we catch more cases
6329636b 106 where the reference has been optimized out. */
121f3051 107
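/* A minimal sketch of the front-end usage this interface expects
   (illustrative only; the front-end function and DECL names below are
   assumptions, not code from this file):

     static void
     example_frontend_finish_unit (tree fndecl)
     {
       cgraph_finalize_function (fndecl, false);
       cgraph_finalize_compilation_unit ();
     }

   cgraph_finalize_function is called for every function once its body is
   fully parsed; cgraph_finalize_compilation_unit is called once the whole
   unit has been seen and it ends up driving cgraph_optimize as well.  */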
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
a41f2a28 141#include "ipa-inline.h"
7771d558 142#include "ipa-utils.h"
a0605d65 143#include "lto-streamer.h"
d7c6d889 144
a6868229 145static void cgraph_expand_all_functions (void);
d9d9733a 146static void cgraph_mark_functions_to_output (void);
147static void cgraph_expand_function (struct cgraph_node *);
f788fff2 148static void cgraph_output_pending_asms (void);
25bb88de 149
ecb08119 150FILE *cgraph_dump_file;
121f3051 151
28454517 152/* Used for vtable lookup in thunk adjusting. */
153static GTY (()) tree vtable_entry_type;
154
2c0b522d 155/* Determine if function DECL is needed. That is, visible to something
 156 outside this translation unit or to something magic in the system
6329636b 157 configury. */
2c0b522d 158
7bfefa9d 159bool
160cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 161{
3f82b628 162 /* If the user told us it is used, then it must be so. */
05806473 163 if (node->local.externally_visible)
164 return true;
165
3f82b628 166 /* ??? If the assembler name is set by hand, it is possible to assemble
167 the name later after finalizing the function and the fact is noticed
168 in assemble_name then. This is arguably a bug. */
169 if (DECL_ASSEMBLER_NAME_SET_P (decl)
170 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
171 return true;
172
55680bef 173 /* With -fkeep-inline-functions we are keeping all inline functions except
174 for extern inline ones. */
175 if (flag_keep_inline_functions
176 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 177 && !DECL_EXTERNAL (decl)
cbd7f5a0 178 && !DECL_DISREGARD_INLINE_LIMITS (decl))
55680bef 179 return true;
180
2c0b522d 181 /* If we decided it was needed before, but at the time we didn't have
182 the body of the function available, then it's still needed. We have
183 to go back and re-check its dependencies now. */
184 if (node->needed)
185 return true;
186
187 /* Externally visible functions must be output. The exception is
a0c938f0 188 COMDAT functions that must be output only when they are needed.
8baa9d15 189
 190 When not optimizing, also output the static functions (see
95da6220 191 PR24561), but don't do so for always_inline functions, functions
0f9238c0 192 declared inline, and nested functions. These were optimized out
d3d410e1 193 in the original implementation and it is unclear whether we want
554f2707 194 to change the behavior here. */
bba7ddf8 195 if (((TREE_PUBLIC (decl)
0f9238c0 196 || (!optimize
cbd7f5a0 197 && !DECL_DISREGARD_INLINE_LIMITS (decl)
d3d410e1 198 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 199 && !(DECL_CONTEXT (decl)
200 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 201 && !flag_whole_program
cbcf2791 202 && !flag_lto)
62eec3b4 203 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 204 return true;
205
2c0b522d 206 return false;
207}
208
bdc40eb8 209/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
523c1122 210 functions into the callgraph so that they look like ordinary reachable
 211 functions inserted into the callgraph already at construction time. */
212
213bool
214cgraph_process_new_functions (void)
215{
216 bool output = false;
217 tree fndecl;
218 struct cgraph_node *node;
219
0cddb138 220 varpool_analyze_pending_decls ();
523c1122 221 /* Note that this queue may grow as it is being processed, as the new
222 functions may generate new ones. */
223 while (cgraph_new_nodes)
224 {
225 node = cgraph_new_nodes;
226 fndecl = node->decl;
227 cgraph_new_nodes = cgraph_new_nodes->next_needed;
228 switch (cgraph_state)
229 {
230 case CGRAPH_STATE_CONSTRUCTION:
 231 /* At construction time we just need to finalize the function and move
 232 it into the reachable functions list. */
233
234 node->next_needed = NULL;
235 cgraph_finalize_function (fndecl, false);
236 cgraph_mark_reachable_node (node);
237 output = true;
4f7a1122 238 cgraph_call_function_insertion_hooks (node);
523c1122 239 break;
240
241 case CGRAPH_STATE_IPA:
f517b36e 242 case CGRAPH_STATE_IPA_SSA:
523c1122 243 /* When IPA optimization has already started, do all essential
 244 transformations that have already been performed on the whole
 245 cgraph but not on this function. */
246
75a70cf9 247 gimple_register_cfg_hooks ();
523c1122 248 if (!node->analyzed)
249 cgraph_analyze_function (node);
250 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
251 current_function_decl = fndecl;
f517b36e 252 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
253 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
254 /* When not optimizing, be sure we run early local passes anyway
255 to expand OMP. */
256 || !optimize)
20099e35 257 execute_pass_list (pass_early_local_passes.pass.sub);
649597af 258 else
a41f2a28 259 compute_inline_parameters (node, true);
523c1122 260 free_dominance_info (CDI_POST_DOMINATORS);
261 free_dominance_info (CDI_DOMINATORS);
262 pop_cfun ();
263 current_function_decl = NULL;
4f7a1122 264 cgraph_call_function_insertion_hooks (node);
523c1122 265 break;
266
267 case CGRAPH_STATE_EXPANSION:
268 /* Functions created during expansion shall be compiled
269 directly. */
09fc9532 270 node->process = 0;
4f7a1122 271 cgraph_call_function_insertion_hooks (node);
523c1122 272 cgraph_expand_function (node);
273 break;
274
275 default:
276 gcc_unreachable ();
277 break;
278 }
0cddb138 279 varpool_analyze_pending_decls ();
523c1122 280 }
281 return output;
282}
283
9b8fb23a 284/* As a GCC extension we allow redefinition of the function. The
 285 semantics when the two copies of the body differ are not well defined.
 286 We replace the old body with the new body, so in unit-at-a-time mode
 287 we always use the new body, while in normal mode we may end up with the
 288 old body inlined into some functions and the new body expanded and
 289 inlined in others.
 290
 291 ??? It may make more sense to use one body for inlining and the other
 292 body for expanding the function, but this is difficult to do. */
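/* For instance (an illustrative use of the GNU extern inline extension,
   not code from this file):

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   When the second definition is finalized, the node for f is reset below
   so that the new body replaces the old one from that point on.  */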
293
294static void
295cgraph_reset_node (struct cgraph_node *node)
296{
09fc9532 297 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 298 This is *not* testing for whether we've already emitted the function.
299 That case can be sort-of legitimately seen with real function redefinition
300 errors. I would argue that the front end should never present us with
301 such a case, but don't enforce that for now. */
09fc9532 302 gcc_assert (!node->process);
9b8fb23a 303
304 /* Reset our data structures so we can analyze the function again. */
305 memset (&node->local, 0, sizeof (node->local));
306 memset (&node->global, 0, sizeof (node->global));
307 memset (&node->rtl, 0, sizeof (node->rtl));
308 node->analyzed = false;
309 node->local.redefined_extern_inline = true;
310 node->local.finalized = false;
311
9b8fb23a 312 cgraph_node_remove_callees (node);
313
 314 /* We may need to re-queue the node for assembling in case
46beef9a 315 we already processed it and ignored it as not needed, or got
 316 a re-declaration in IMA mode. */
317 if (node->reachable)
9b8fb23a 318 {
319 struct cgraph_node *n;
320
321 for (n = cgraph_nodes_queue; n; n = n->next_needed)
322 if (n == node)
323 break;
324 if (!n)
325 node->reachable = 0;
326 }
327}
c08871a9 328
1e8e9920 329static void
330cgraph_lower_function (struct cgraph_node *node)
331{
332 if (node->lowered)
333 return;
bfec3452 334
335 if (node->nested)
336 lower_nested_functions (node->decl);
337 gcc_assert (!node->nested);
338
1e8e9920 339 tree_lowering_passes (node->decl);
340 node->lowered = true;
341}
342
28df663b 343/* DECL has been parsed. Take it, queue it, compile it at the whim of the
344 logic in effect. If NESTED is true, then our caller cannot stand to have
345 the garbage collector run at the moment. We would need to either create
346 a new GC context, or just not compile right now. */
ae01b312 347
348void
28df663b 349cgraph_finalize_function (tree decl, bool nested)
ae01b312 350{
5a90471f 351 struct cgraph_node *node = cgraph_get_create_node (decl);
ae01b312 352
c08871a9 353 if (node->local.finalized)
9b8fb23a 354 cgraph_reset_node (node);
28df663b 355
c08871a9 356 notice_global_symbol (decl);
79bb87b4 357 node->local.finalized = true;
e27482aa 358 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
ae01b312 359
7bfefa9d 360 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 361 cgraph_mark_needed_node (node);
362
ecda6e51 363 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 364 level unit, we need to be conservative about possible entry points
365 there. */
1e3aebec 366 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
367 || DECL_STATIC_CONSTRUCTOR (decl)
d050bafd 368 || DECL_STATIC_DESTRUCTOR (decl)
 369 /* COMDAT virtual functions may be referenced by a vtable from
0a10fd82 370 another compilation unit. Still we want to devirtualize calls
d050bafd 371 to those, so we need to analyze them.
 372 FIXME: We should introduce "may" edges for this purpose and update
 373 their handling in unreachable function removal and in the inliner too. */
91bf9d9a 374 || (DECL_VIRTUAL_P (decl)
375 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
3f82b628 376 cgraph_mark_reachable_node (node);
377
2c0b522d 378 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 379 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 380 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 381
b69eb0ff 382 /* Possibly warn about unused parameters. */
383 if (warn_unused_parameter)
384 do_warn_unused_parameter (decl);
6329636b 385
386 if (!nested)
387 ggc_collect ();
ae01b312 388}
389
0da03d11 390/* The C99 extern inline keywords allow changing a declaration after the
 391 function has been finalized. We need to re-decide whether we want to mark
 392 the function as needed then. */
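/* For example (an illustrative sketch of C99 inline semantics; the
   function name is hypothetical):

     inline int g (void) { return 0; }
     extern int g (void);

   The first definition alone is only an inline definition, but the later
   extern declaration requires an external definition of g, so a function
   that was finalized as not needed may have to be re-marked as needed.  */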
393
394void
395cgraph_mark_if_needed (tree decl)
396{
fd6a3c41 397 struct cgraph_node *node = cgraph_get_node (decl);
7bfefa9d 398 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 399 cgraph_mark_needed_node (node);
400}
401
ccf4ab6b 402/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
403static bool
404clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
405{
406 while (node != node2 && node2)
407 node2 = node2->clone_of;
408 return node2 != NULL;
409}
410
1a036a3b 411/* Verify edge E count and frequency. */
412
413static bool
414verify_edge_count_and_frequency (struct cgraph_edge *e)
415{
416 bool error_found = false;
417 if (e->count < 0)
418 {
419 error ("caller edge count is negative");
420 error_found = true;
421 }
422 if (e->frequency < 0)
423 {
424 error ("caller edge frequency is negative");
425 error_found = true;
426 }
427 if (e->frequency > CGRAPH_FREQ_MAX)
428 {
429 error ("caller edge frequency is too large");
430 error_found = true;
431 }
432 if (gimple_has_body_p (e->caller->decl)
433 && !e->caller->global.inlined_to
8bae3ea4 434 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
 435 Remove this once edges are actually removed from the function at that time. */
436 && (e->frequency
437 || (inline_edge_summary_vec
438 && !inline_edge_summary (e)->predicate))
1a036a3b 439 && (e->frequency
440 != compute_call_stmt_bb_frequency (e->caller->decl,
441 gimple_bb (e->call_stmt))))
442 {
0a10fd82 443 error ("caller edge frequency %i does not match BB frequency %i",
1a036a3b 444 e->frequency,
445 compute_call_stmt_bb_frequency (e->caller->decl,
446 gimple_bb (e->call_stmt)));
447 error_found = true;
448 }
449 return error_found;
450}
451
7b29dd2f 452/* Switch to THIS_CFUN if needed and print STMT to stderr. */
453static void
454cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
455{
456 /* debug_gimple_stmt needs correct cfun */
457 if (cfun != this_cfun)
458 set_cfun (this_cfun);
459 debug_gimple_stmt (stmt);
460}
461
b0cdf642 462/* Verify the consistency of the given cgraph node. */
4b987fac 463DEBUG_FUNCTION void
b0cdf642 464verify_cgraph_node (struct cgraph_node *node)
465{
466 struct cgraph_edge *e;
e27482aa 467 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
468 basic_block this_block;
75a70cf9 469 gimple_stmt_iterator gsi;
9bfec7c2 470 bool error_found = false;
b0cdf642 471
852f689e 472 if (seen_error ())
bd09cd3e 473 return;
474
b0cdf642 475 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 476 for (e = node->callees; e; e = e->next_callee)
477 if (e->aux)
478 {
0a81f5a0 479 error ("aux field set for edge %s->%s",
abd3e6b5 480 identifier_to_locale (cgraph_node_name (e->caller)),
481 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 482 error_found = true;
483 }
a2cb9b3b 484 if (node->count < 0)
485 {
bf776685 486 error ("execution count is negative");
a2cb9b3b 487 error_found = true;
488 }
59dd4830 489 if (node->global.inlined_to && node->local.externally_visible)
490 {
bf776685 491 error ("externally visible inline clone");
59dd4830 492 error_found = true;
493 }
494 if (node->global.inlined_to && node->address_taken)
495 {
bf776685 496 error ("inline clone with address taken");
59dd4830 497 error_found = true;
498 }
499 if (node->global.inlined_to && node->needed)
500 {
bf776685 501 error ("inline clone is needed");
59dd4830 502 error_found = true;
503 }
799c8711 504 for (e = node->indirect_calls; e; e = e->next_callee)
505 {
506 if (e->aux)
507 {
508 error ("aux field set for indirect edge from %s",
509 identifier_to_locale (cgraph_node_name (e->caller)));
510 error_found = true;
511 }
512 if (!e->indirect_unknown_callee
513 || !e->indirect_info)
514 {
515 error ("An indirect edge from %s is not marked as indirect or has "
516 "associated indirect_info, the corresponding statement is: ",
517 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 518 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 519 error_found = true;
520 }
521 }
b0cdf642 522 for (e = node->callers; e; e = e->next_caller)
523 {
1a036a3b 524 if (verify_edge_count_and_frequency (e))
525 error_found = true;
b0cdf642 526 if (!e->inline_failed)
527 {
528 if (node->global.inlined_to
529 != (e->caller->global.inlined_to
530 ? e->caller->global.inlined_to : e->caller))
531 {
0a81f5a0 532 error ("inlined_to pointer is wrong");
b0cdf642 533 error_found = true;
534 }
535 if (node->callers->next_caller)
536 {
0a81f5a0 537 error ("multiple inline callers");
b0cdf642 538 error_found = true;
539 }
540 }
541 else
542 if (node->global.inlined_to)
543 {
0a81f5a0 544 error ("inlined_to pointer set for noninline callers");
b0cdf642 545 error_found = true;
546 }
547 }
1a036a3b 548 for (e = node->indirect_calls; e; e = e->next_callee)
549 if (verify_edge_count_and_frequency (e))
550 error_found = true;
b0cdf642 551 if (!node->callers && node->global.inlined_to)
552 {
5cd75817 553 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 554 error_found = true;
555 }
556 if (node->global.inlined_to == node)
557 {
0a81f5a0 558 error ("inlined_to pointer refers to itself");
b0cdf642 559 error_found = true;
560 }
561
7019fd3f 562 if (!cgraph_get_node (node->decl))
b0cdf642 563 {
0f6439b9 564 error ("node not found in cgraph_hash");
b0cdf642 565 error_found = true;
566 }
a0c938f0 567
ccf4ab6b 568 if (node->clone_of)
569 {
570 struct cgraph_node *n;
571 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
572 if (n == node)
573 break;
574 if (!n)
575 {
576 error ("node has wrong clone_of");
577 error_found = true;
578 }
579 }
580 if (node->clones)
581 {
582 struct cgraph_node *n;
583 for (n = node->clones; n; n = n->next_sibling_clone)
584 if (n->clone_of != node)
585 break;
586 if (n)
587 {
588 error ("node has wrong clone list");
589 error_found = true;
590 }
591 }
592 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
593 {
594 error ("node is in clone list but it is not clone");
595 error_found = true;
596 }
597 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
598 {
599 error ("node has wrong prev_clone pointer");
600 error_found = true;
601 }
602 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
603 {
604 error ("double linked list of clones corrupted");
605 error_found = true;
606 }
c524ac5d 607 if (node->same_comdat_group)
608 {
609 struct cgraph_node *n = node->same_comdat_group;
610
611 if (!DECL_ONE_ONLY (node->decl))
612 {
613 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
614 error_found = true;
615 }
616 if (n == node)
617 {
618 error ("node is alone in a comdat group");
619 error_found = true;
620 }
621 do
622 {
623 if (!n->same_comdat_group)
624 {
625 error ("same_comdat_group is not a circular list");
626 error_found = true;
627 break;
628 }
629 n = n->same_comdat_group;
630 }
631 while (n != node);
632 }
ccf4ab6b 633
91bf9d9a 634 if (node->analyzed && node->thunk.thunk_p)
635 {
636 if (!node->callees)
637 {
638 error ("No edge out of thunk node");
639 error_found = true;
640 }
641 else if (node->callees->next_callee)
642 {
643 error ("More than one edge out of thunk node");
644 error_found = true;
645 }
646 if (gimple_has_body_p (node->decl))
647 {
648 error ("Thunk is not supposed to have body");
649 error_found = true;
650 }
651 }
652 else if (node->analyzed && gimple_has_body_p (node->decl)
653 && !TREE_ASM_WRITTEN (node->decl)
654 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
655 && !flag_wpa)
b0cdf642 656 {
e27482aa 657 if (this_cfun->cfg)
658 {
659 /* The nodes we're interested in are never shared, so walk
660 the tree ignoring duplicates. */
e7c352d1 661 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 662 /* Reach the trees by walking over the CFG, and note the
663 enclosing basic-blocks in the call edges. */
664 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 665 for (gsi = gsi_start_bb (this_block);
666 !gsi_end_p (gsi);
667 gsi_next (&gsi))
9bfec7c2 668 {
75a70cf9 669 gimple stmt = gsi_stmt (gsi);
799c8711 670 if (is_gimple_call (stmt))
9bfec7c2 671 {
672 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 673 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 674 if (e)
675 {
676 if (e->aux)
677 {
0a81f5a0 678 error ("shared call_stmt:");
7b29dd2f 679 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 680 error_found = true;
681 }
799c8711 682 if (!e->indirect_unknown_callee)
28454517 683 {
799c8711 684 if (e->callee->same_body_alias)
685 {
686 error ("edge points to same body alias:");
687 debug_tree (e->callee->decl);
688 error_found = true;
689 }
e748b31d 690 else if (!e->callee->global.inlined_to
799c8711 691 && decl
e748b31d 692 && cgraph_get_node (decl)
693 && (e->callee->former_clone_of
694 != cgraph_get_node (decl)->decl)
fd6a3c41 695 && !clone_of_p (cgraph_get_node (decl),
799c8711 696 e->callee))
697 {
698 error ("edge points to wrong declaration:");
699 debug_tree (e->callee->decl);
700 fprintf (stderr," Instead of:");
701 debug_tree (decl);
702 error_found = true;
703 }
28454517 704 }
799c8711 705 else if (decl)
9bfec7c2 706 {
799c8711 707 error ("an indirect edge with unknown callee "
708 "corresponding to a call_stmt with "
709 "a known declaration:");
ee3f5fc0 710 error_found = true;
7b29dd2f 711 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
9bfec7c2 712 }
713 e->aux = (void *)1;
714 }
799c8711 715 else if (decl)
9bfec7c2 716 {
0a81f5a0 717 error ("missing callgraph edge for call stmt:");
7b29dd2f 718 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 719 error_found = true;
720 }
721 }
722 }
e27482aa 723 pointer_set_destroy (visited_nodes);
e27482aa 724 }
725 else
726 /* No CFG available?! */
727 gcc_unreachable ();
728
b0cdf642 729 for (e = node->callees; e; e = e->next_callee)
730 {
799c8711 731 if (!e->aux)
b0cdf642 732 {
0a81f5a0 733 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 734 identifier_to_locale (cgraph_node_name (e->caller)),
735 identifier_to_locale (cgraph_node_name (e->callee)));
7b29dd2f 736 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
b0cdf642 737 error_found = true;
738 }
739 e->aux = 0;
740 }
799c8711 741 for (e = node->indirect_calls; e; e = e->next_callee)
742 {
743 if (!e->aux)
744 {
745 error ("an indirect edge from %s has no corresponding call_stmt",
746 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 747 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 748 error_found = true;
749 }
750 e->aux = 0;
751 }
b0cdf642 752 }
753 if (error_found)
754 {
755 dump_cgraph_node (stderr, node);
0a81f5a0 756 internal_error ("verify_cgraph_node failed");
b0cdf642 757 }
758 timevar_pop (TV_CGRAPH_VERIFY);
759}
760
761/* Verify whole cgraph structure. */
4b987fac 762DEBUG_FUNCTION void
b0cdf642 763verify_cgraph (void)
764{
765 struct cgraph_node *node;
766
852f689e 767 if (seen_error ())
8ec2a798 768 return;
769
b0cdf642 770 for (node = cgraph_nodes; node; node = node->next)
771 verify_cgraph_node (node);
772}
773
56af936e 774/* Output all asm statements we have stored up to be output. */
775
776static void
777cgraph_output_pending_asms (void)
778{
779 struct cgraph_asm_node *can;
780
852f689e 781 if (seen_error ())
56af936e 782 return;
783
784 for (can = cgraph_asm_nodes; can; can = can->next)
785 assemble_asm (can->asm_str);
786 cgraph_asm_nodes = NULL;
787}
788
0785e435 789/* Analyze the function scheduled to be output. */
222bc9b9 790void
0785e435 791cgraph_analyze_function (struct cgraph_node *node)
792{
bfec3452 793 tree save = current_function_decl;
0785e435 794 tree decl = node->decl;
795
91bf9d9a 796 if (node->thunk.thunk_p)
797 {
798 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
799 NULL, 0, CGRAPH_FREQ_BASE);
800 }
801 else
802 {
803 current_function_decl = decl;
804 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 805
91bf9d9a 806 assign_assembler_name_if_neeeded (node->decl);
6816d0c4 807
91bf9d9a 808 /* Make sure to gimplify bodies only once. While analyzing a
 809 function we lower it, which requires gimplified nested
810 functions, so we can end up here with an already gimplified
811 body. */
812 if (!gimple_body (decl))
813 gimplify_function_tree (decl);
814 dump_function (TDI_generic, decl);
bfec3452 815
91bf9d9a 816 cgraph_lower_function (node);
817 pop_cfun ();
818 }
6e8d6e86 819 node->analyzed = true;
0785e435 820
bfec3452 821 current_function_decl = save;
0785e435 822}
823
d05db70d 824/* Process attributes common for vars and functions. */
825
826static void
827process_common_attributes (tree decl)
828{
829 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
830
831 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
832 {
833 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
834 "%<weakref%> attribute should be accompanied with"
835 " an %<alias%> attribute");
836 DECL_WEAK (decl) = 0;
40b32d93 837 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
838 DECL_ATTRIBUTES (decl));
d05db70d 839 }
840}
841
05806473 842/* Look for externally_visible and used attributes and mark cgraph nodes
843 accordingly.
844
845 We cannot mark the nodes at the point the attributes are processed (in
846 handle_*_attribute) because the copy of the declarations available at that
847 point may not be canonical. For example, in:
848
849 void f();
850 void f() __attribute__((used));
851
852 the declaration we see in handle_used_attribute will be the second
853 declaration -- but the front end will subsequently merge that declaration
854 with the original declaration and discard the second declaration.
855
856 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
857
858 void f() {}
859 void f() __attribute__((externally_visible));
860
861 is valid.
862
863 So, we walk the nodes at the end of the translation unit, applying the
864 attributes at that point. */
865
866static void
867process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 868 struct varpool_node *first_var)
05806473 869{
870 struct cgraph_node *node;
1d416bd7 871 struct varpool_node *vnode;
05806473 872
873 for (node = cgraph_nodes; node != first; node = node->next)
874 {
875 tree decl = node->decl;
83a23b05 876 if (DECL_PRESERVE_P (decl))
0b49f8f8 877 cgraph_mark_needed_node (node);
62433d51 878 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
879 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
880 && TREE_PUBLIC (node->decl))
881 {
882 if (node->local.finalized)
883 cgraph_mark_needed_node (node);
884 }
885 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 886 {
ba12ea31 887 if (! TREE_PUBLIC (node->decl))
712d2297 888 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
889 "%<externally_visible%>"
890 " attribute have effect only on public objects");
59dd4830 891 else if (node->local.finalized)
892 cgraph_mark_needed_node (node);
05806473 893 }
40b32d93 894 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
895 && node->local.finalized)
896 {
897 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
898 "%<weakref%> attribute ignored"
899 " because function is defined");
900 DECL_WEAK (decl) = 0;
901 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
902 DECL_ATTRIBUTES (decl));
903 }
d05db70d 904 process_common_attributes (decl);
05806473 905 }
1d416bd7 906 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 907 {
908 tree decl = vnode->decl;
83a23b05 909 if (DECL_PRESERVE_P (decl))
05806473 910 {
22671757 911 vnode->force_output = true;
05806473 912 if (vnode->finalized)
1d416bd7 913 varpool_mark_needed_node (vnode);
05806473 914 }
62433d51 915 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
916 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 917 && TREE_PUBLIC (vnode->decl))
62433d51 918 {
919 if (vnode->finalized)
920 varpool_mark_needed_node (vnode);
921 }
922 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 923 {
ba12ea31 924 if (! TREE_PUBLIC (vnode->decl))
712d2297 925 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
926 "%<externally_visible%>"
927 " attribute have effect only on public objects");
59dd4830 928 else if (vnode->finalized)
929 varpool_mark_needed_node (vnode);
05806473 930 }
40b32d93 931 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
932 && vnode->finalized
933 && DECL_INITIAL (decl))
934 {
935 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
936 "%<weakref%> attribute ignored"
937 " because variable is initialized");
938 DECL_WEAK (decl) = 0;
939 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
940 DECL_ATTRIBUTES (decl));
941 }
d05db70d 942 process_common_attributes (decl);
05806473 943 }
944}
945
aeeb194b 946/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
 947 each reachable function) and build the cgraph.
 948 The function can be called multiple times after inserting new nodes into
0d424440 949 the beginning of the queue. Just the new part of the queue is re-scanned then. */
ae01b312 950
aeeb194b 951static void
952cgraph_analyze_functions (void)
ae01b312 953{
c1dcd13c 954 /* Keep track of already processed nodes when called multiple times for
06b27565 955 intermodule optimization. */
c1dcd13c 956 static struct cgraph_node *first_analyzed;
c17d0de1 957 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 958 static struct varpool_node *first_analyzed_var;
aeeb194b 959 struct cgraph_node *node, *next;
ae01b312 960
f1c35659 961 bitmap_obstack_initialize (NULL);
c17d0de1 962 process_function_and_variable_attributes (first_processed,
963 first_analyzed_var);
964 first_processed = cgraph_nodes;
1d416bd7 965 first_analyzed_var = varpool_nodes;
966 varpool_analyze_pending_decls ();
f79b6507 967 if (cgraph_dump_file)
ae01b312 968 {
e4200070 969 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 970 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 971 if (node->needed)
f79b6507 972 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
973 fprintf (cgraph_dump_file, "\n");
ae01b312 974 }
aeeb194b 975 cgraph_process_new_functions ();
ae01b312 976
e6d2b2d8 977 /* Propagate the reachability flag and lower the representation of all reachable
978 functions. In the future, lowering will introduce new functions and
979 new entry points on the way (by template instantiation and virtual
980 method table generation for instance). */
3d7bfc56 981 while (cgraph_nodes_queue)
ae01b312 982 {
0785e435 983 struct cgraph_edge *edge;
3d7bfc56 984 tree decl = cgraph_nodes_queue->decl;
985
986 node = cgraph_nodes_queue;
d87976fb 987 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 988 node->next_needed = NULL;
ae01b312 989
638531ad 990 /* ??? It is possible to create an extern inline function and later use the
bbd5cba2 991 weak alias attribute to kill its body. See
638531ad 992 gcc.c-torture/compile/20011119-1.c */
91bf9d9a 993 if (!DECL_STRUCT_FUNCTION (decl)
994 && !node->thunk.thunk_p)
9b8fb23a 995 {
996 cgraph_reset_node (node);
997 continue;
998 }
638531ad 999
7bfefa9d 1000 if (!node->analyzed)
1001 cgraph_analyze_function (node);
2c0b522d 1002
ae01b312 1003 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 1004 if (!edge->callee->reachable)
2c0b522d 1005 cgraph_mark_reachable_node (edge->callee);
91bf9d9a 1006 for (edge = node->callers; edge; edge = edge->next_caller)
1007 if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
1008 cgraph_mark_reachable_node (edge->caller);
2c0b522d 1009
61c2c7b1 1010 if (node->same_comdat_group)
1011 {
1012 for (next = node->same_comdat_group;
1013 next != node;
1014 next = next->same_comdat_group)
1015 cgraph_mark_reachable_node (next);
1016 }
1017
d544ceff 1018 /* If decl is a clone of an abstract function, mark that abstract
1019 function so that we don't release its body. The DECL_INITIAL() of that
fd6a3c41 1020 abstract function declaration will later be needed to output debug
1021 info. */
d544ceff 1022 if (DECL_ABSTRACT_ORIGIN (decl))
1023 {
fd6a3c41 1024 struct cgraph_node *origin_node;
1025 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
d544ceff 1026 origin_node->abstract_and_needed = true;
1027 }
1028
c17d0de1 1029 /* We finalize local static variables while constructing callgraph
1030 edges. Process their attributes too. */
1031 process_function_and_variable_attributes (first_processed,
1032 first_analyzed_var);
1033 first_processed = cgraph_nodes;
1d416bd7 1034 first_analyzed_var = varpool_nodes;
1035 varpool_analyze_pending_decls ();
aeeb194b 1036 cgraph_process_new_functions ();
ae01b312 1037 }
2c0b522d 1038
aa5e06c7 1039 /* Collect entry points to the unit. */
f79b6507 1040 if (cgraph_dump_file)
3d7bfc56 1041 {
e4200070 1042 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 1043 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 1044 if (node->needed)
f79b6507 1045 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 1046 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 1047 dump_cgraph (cgraph_dump_file);
7410370b 1048 dump_varpool (cgraph_dump_file);
3d7bfc56 1049 }
e6d2b2d8 1050
f79b6507 1051 if (cgraph_dump_file)
1052 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 1053
f4ec5ce1 1054 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 1055 {
1056 tree decl = node->decl;
f4ec5ce1 1057 next = node->next;
ae01b312 1058
91bf9d9a 1059 if (node->local.finalized && !gimple_has_body_p (decl)
1060 && !node->thunk.thunk_p)
a0c938f0 1061 cgraph_reset_node (node);
9b8fb23a 1062
91bf9d9a 1063 if (!node->reachable
1064 && (gimple_has_body_p (decl) || node->thunk.thunk_p))
ae01b312 1065 {
f79b6507 1066 if (cgraph_dump_file)
1067 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 1068 cgraph_remove_node (node);
9b8fb23a 1069 continue;
ae01b312 1070 }
bc5cab3b 1071 else
1072 node->next_needed = NULL;
91bf9d9a 1073 gcc_assert (!node->local.finalized || node->thunk.thunk_p
1074 || gimple_has_body_p (decl));
9b8fb23a 1075 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 1076 }
f79b6507 1077 if (cgraph_dump_file)
e4200070 1078 {
1079 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1080 dump_cgraph (cgraph_dump_file);
7410370b 1081 dump_varpool (cgraph_dump_file);
e4200070 1082 }
f1c35659 1083 bitmap_obstack_release (NULL);
c1dcd13c 1084 first_analyzed = cgraph_nodes;
ae01b312 1085 ggc_collect ();
aeeb194b 1086}
1087
8f69fd82 1088
aeeb194b 1089/* Analyze the whole compilation unit once it is parsed completely. */
1090
1091void
1092cgraph_finalize_compilation_unit (void)
1093{
9929334e 1094 timevar_push (TV_CGRAPH);
1095
a0605d65 1096 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1097 if (flag_lto)
1098 lto_streamer_hooks_init ();
1099
bb903e9c 1100 /* If we're here there's no current function anymore. Some frontends
1101 are lazy in clearing these. */
1102 current_function_decl = NULL;
1103 set_cfun (NULL);
1104
bfec3452 1105 /* Do not skip analyzing the functions if there were errors; otherwise we
 1106 would miss diagnostics for the following functions. */
aeeb194b 1107
8f69fd82 1108 /* Emit size functions we didn't inline. */
4189e677 1109 finalize_size_functions ();
8f69fd82 1110
9929334e 1111 /* Mark alias targets necessary and emit diagnostics. */
1112 finish_aliases_1 ();
1113
aeeb194b 1114 if (!quiet_flag)
1115 {
1116 fprintf (stderr, "\nAnalyzing compilation unit\n");
1117 fflush (stderr);
1118 }
1119
9929334e 1120 /* Gimplify and lower all functions, compute reachability and
1121 remove unreachable nodes. */
1122 cgraph_analyze_functions ();
1123
8f69fd82 1124 /* Mark alias targets necessary and emit diagnostics. */
1125 finish_aliases_1 ();
1126
9929334e 1127 /* Gimplify and lower thunks. */
aeeb194b 1128 cgraph_analyze_functions ();
bfec3452 1129
9929334e 1130 /* Finally drive the pass manager. */
bfec3452 1131 cgraph_optimize ();
9929334e 1132
1133 timevar_pop (TV_CGRAPH);
ae01b312 1134}
9ed5b1f5 1135
1136
ae01b312 1137/* Figure out what functions we want to assemble. */
1138
1139static void
d9d9733a 1140cgraph_mark_functions_to_output (void)
ae01b312 1141{
1142 struct cgraph_node *node;
61c2c7b1 1143#ifdef ENABLE_CHECKING
1144 bool check_same_comdat_groups = false;
1145
1146 for (node = cgraph_nodes; node; node = node->next)
1147 gcc_assert (!node->process);
1148#endif
ae01b312 1149
ae01b312 1150 for (node = cgraph_nodes; node; node = node->next)
1151 {
1152 tree decl = node->decl;
d7c6d889 1153 struct cgraph_edge *e;
a0c938f0 1154
61c2c7b1 1155 gcc_assert (!node->process || node->same_comdat_group);
1156 if (node->process)
1157 continue;
d7c6d889 1158
1159 for (e = node->callers; e; e = e->next_caller)
611e5405 1160 if (e->inline_failed)
d7c6d889 1161 break;
ae01b312 1162
e6d2b2d8 1163 /* We need to output all local functions that are used and not
1164 always inlined, as well as those that are reachable from
1165 outside the current compilation unit. */
1a1a827a 1166 if (node->analyzed
91bf9d9a 1167 && !node->thunk.thunk_p
b0cdf642 1168 && !node->global.inlined_to
1e3aebec 1169 && (!cgraph_only_called_directly_p (node)
d7c6d889 1170 || (e && node->reachable))
4ee9c684 1171 && !TREE_ASM_WRITTEN (decl)
ae01b312 1172 && !DECL_EXTERNAL (decl))
61c2c7b1 1173 {
1174 node->process = 1;
1175 if (node->same_comdat_group)
1176 {
1177 struct cgraph_node *next;
1178 for (next = node->same_comdat_group;
1179 next != node;
1180 next = next->same_comdat_group)
91bf9d9a 1181 if (!next->thunk.thunk_p)
1182 next->process = 1;
61c2c7b1 1183 }
1184 }
1185 else if (node->same_comdat_group)
1186 {
1187#ifdef ENABLE_CHECKING
1188 check_same_comdat_groups = true;
1189#endif
1190 }
cc636d56 1191 else
9cee7c3f 1192 {
1193 /* We should've reclaimed all functions that are not needed. */
1194#ifdef ENABLE_CHECKING
75a70cf9 1195 if (!node->global.inlined_to
1a1a827a 1196 && gimple_has_body_p (decl)
08843223 1197 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
 1198 inline copies are inside the partition, we can end up not removing the body
 1199 since we no longer have an analyzed node pointing to it. */
1200 && !node->in_other_partition
9cee7c3f 1201 && !DECL_EXTERNAL (decl))
1202 {
1203 dump_cgraph_node (stderr, node);
1204 internal_error ("failed to reclaim unneeded function");
1205 }
1206#endif
75a70cf9 1207 gcc_assert (node->global.inlined_to
1a1a827a 1208 || !gimple_has_body_p (decl)
08843223 1209 || node->in_other_partition
9cee7c3f 1210 || DECL_EXTERNAL (decl));
1211
1212 }
a0c938f0 1213
961e3b13 1214 }
61c2c7b1 1215#ifdef ENABLE_CHECKING
1216 if (check_same_comdat_groups)
1217 for (node = cgraph_nodes; node; node = node->next)
1218 if (node->same_comdat_group && !node->process)
1219 {
1220 tree decl = node->decl;
1221 if (!node->global.inlined_to
1222 && gimple_has_body_p (decl)
08843223 1223 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
 1224 inline copies are inside the partition, we can end up not removing the body
 1225 since we no longer have an analyzed node pointing to it. */
1226 && !node->in_other_partition
61c2c7b1 1227 && !DECL_EXTERNAL (decl))
1228 {
1229 dump_cgraph_node (stderr, node);
1230 internal_error ("failed to reclaim unneeded function");
1231 }
1232 }
1233#endif
961e3b13 1234}
1235
28454517 1236/* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
 1237 in lowered GIMPLE form.
 1238
 1239 Set current_function_decl and cfun to the newly constructed empty function
 1240 body and return the basic block in that function body. */
1241
1242static basic_block
1243init_lowered_empty_function (tree decl)
1244{
1245 basic_block bb;
1246
1247 current_function_decl = decl;
1248 allocate_struct_function (decl, false);
1249 gimple_register_cfg_hooks ();
1250 init_empty_tree_cfg ();
1251 init_tree_ssa (cfun);
1252 init_ssa_operands ();
1253 cfun->gimple_df->in_ssa_p = true;
1254 DECL_INITIAL (decl) = make_node (BLOCK);
1255
1256 DECL_SAVED_TREE (decl) = error_mark_node;
1257 cfun->curr_properties |=
1258 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1259 PROP_ssa);
1260
1261 /* Create BB for body of the function and connect it properly. */
1262 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1263 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1264 make_edge (bb, EXIT_BLOCK_PTR, 0);
1265
1266 return bb;
1267}
1268
1269/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1270 offset indicated by VIRTUAL_OFFSET, if that is
 1271 non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
 1272 zero for a result-adjusting thunk. */
1273
1274static tree
1275thunk_adjust (gimple_stmt_iterator * bsi,
1276 tree ptr, bool this_adjusting,
1277 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1278{
1279 gimple stmt;
1280 tree ret;
1281
55d6cb23 1282 if (this_adjusting
1283 && fixed_offset != 0)
28454517 1284 {
1285 stmt = gimple_build_assign (ptr,
1286 fold_build2_loc (input_location,
1287 POINTER_PLUS_EXPR,
1288 TREE_TYPE (ptr), ptr,
1289 size_int (fixed_offset)));
1290 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1291 }
1292
1293 /* If there's a virtual offset, look up that value in the vtable and
1294 adjust the pointer again. */
1295 if (virtual_offset)
1296 {
1297 tree vtabletmp;
1298 tree vtabletmp2;
1299 tree vtabletmp3;
1300 tree offsettmp;
1301
1302 if (!vtable_entry_type)
1303 {
1304 tree vfunc_type = make_node (FUNCTION_TYPE);
1305 TREE_TYPE (vfunc_type) = integer_type_node;
1306 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1307 layout_type (vfunc_type);
1308
1309 vtable_entry_type = build_pointer_type (vfunc_type);
1310 }
1311
1312 vtabletmp =
1313 create_tmp_var (build_pointer_type
1314 (build_pointer_type (vtable_entry_type)), "vptr");
1315
1316 /* The vptr is always at offset zero in the object. */
1317 stmt = gimple_build_assign (vtabletmp,
1318 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1319 ptr));
1320 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1321 mark_symbols_for_renaming (stmt);
1322 find_referenced_vars_in (stmt);
1323
1324 /* Form the vtable address. */
1325 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1326 "vtableaddr");
1327 stmt = gimple_build_assign (vtabletmp2,
182cf5a9 1328 build_simple_mem_ref (vtabletmp));
28454517 1329 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1330 mark_symbols_for_renaming (stmt);
1331 find_referenced_vars_in (stmt);
1332
1333 /* Find the entry with the vcall offset. */
1334 stmt = gimple_build_assign (vtabletmp2,
1335 fold_build2_loc (input_location,
1336 POINTER_PLUS_EXPR,
1337 TREE_TYPE (vtabletmp2),
1338 vtabletmp2,
1339 fold_convert (sizetype,
1340 virtual_offset)));
1341 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1342
1343 /* Get the offset itself. */
1344 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1345 "vcalloffset");
1346 stmt = gimple_build_assign (vtabletmp3,
182cf5a9 1347 build_simple_mem_ref (vtabletmp2));
28454517 1348 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1349 mark_symbols_for_renaming (stmt);
1350 find_referenced_vars_in (stmt);
1351
1352 /* Cast to sizetype. */
1353 offsettmp = create_tmp_var (sizetype, "offset");
1354 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1355 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1356 mark_symbols_for_renaming (stmt);
1357 find_referenced_vars_in (stmt);
1358
1359 /* Adjust the `this' pointer. */
1360 ptr = fold_build2_loc (input_location,
1361 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1362 offsettmp);
1363 }
1364
55d6cb23 1365 if (!this_adjusting
1366 && fixed_offset != 0)
28454517 1367 /* Adjust the pointer by the constant. */
1368 {
1369 tree ptrtmp;
1370
1371 if (TREE_CODE (ptr) == VAR_DECL)
1372 ptrtmp = ptr;
1373 else
1374 {
1375 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1376 stmt = gimple_build_assign (ptrtmp, ptr);
1377 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1378 mark_symbols_for_renaming (stmt);
1379 find_referenced_vars_in (stmt);
1380 }
1381 ptr = fold_build2_loc (input_location,
1382 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1383 size_int (fixed_offset));
1384 }
1385
1386 /* Emit the statement and gimplify the adjustment expression. */
1387 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1388 stmt = gimple_build_assign (ret, ptr);
1389 mark_symbols_for_renaming (stmt);
1390 find_referenced_vars_in (stmt);
1391 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1392
1393 return ret;
1394}
1395
1396/* Produce assembler for thunk NODE. */
1397
1398static void
1399assemble_thunk (struct cgraph_node *node)
1400{
1401 bool this_adjusting = node->thunk.this_adjusting;
1402 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1403 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1404 tree virtual_offset = NULL;
1405 tree alias = node->thunk.alias;
1406 tree thunk_fndecl = node->decl;
1407 tree a = DECL_ARGUMENTS (thunk_fndecl);
1408
1409 current_function_decl = thunk_fndecl;
1410
aed6e608 1411 /* Ensure thunks are emitted in their correct sections. */
1412 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1413
28454517 1414 if (this_adjusting
1415 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1416 virtual_value, alias))
1417 {
1418 const char *fnname;
1419 tree fn_block;
1420
1421 DECL_RESULT (thunk_fndecl)
1422 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1423 RESULT_DECL, 0, integer_type_node);
22ea3b47 1424 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
28454517 1425
1426 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1427 create one. */
1428 fn_block = make_node (BLOCK);
1429 BLOCK_VARS (fn_block) = a;
1430 DECL_INITIAL (thunk_fndecl) = fn_block;
1431 init_function_start (thunk_fndecl);
1432 cfun->is_thunk = 1;
1433 assemble_start_function (thunk_fndecl, fnname);
1434
1435 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1436 fixed_offset, virtual_value, alias);
1437
1438 assemble_end_function (thunk_fndecl, fnname);
1439 init_insn_lengths ();
1440 free_after_compilation (cfun);
1441 set_cfun (NULL);
1442 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
91bf9d9a 1443 node->thunk.thunk_p = false;
1444 node->analyzed = false;
28454517 1445 }
1446 else
1447 {
1448 tree restype;
1449 basic_block bb, then_bb, else_bb, return_bb;
1450 gimple_stmt_iterator bsi;
1451 int nargs = 0;
1452 tree arg;
1453 int i;
1454 tree resdecl;
1455 tree restmp = NULL;
1456 VEC(tree, heap) *vargs;
1457
1458 gimple call;
1459 gimple ret;
1460
1461 DECL_IGNORED_P (thunk_fndecl) = 1;
1462 bitmap_obstack_initialize (NULL);
1463
1464 if (node->thunk.virtual_offset_p)
1465 virtual_offset = size_int (virtual_value);
1466
1467 /* Build the return declaration for the function. */
1468 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1469 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1470 {
1471 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1472 DECL_ARTIFICIAL (resdecl) = 1;
1473 DECL_IGNORED_P (resdecl) = 1;
1474 DECL_RESULT (thunk_fndecl) = resdecl;
1475 }
1476 else
1477 resdecl = DECL_RESULT (thunk_fndecl);
1478
1479 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1480
1481 bsi = gsi_start_bb (bb);
1482
1483 /* Build call to the function being thunked. */
1484 if (!VOID_TYPE_P (restype))
1485 {
1486 if (!is_gimple_reg_type (restype))
1487 {
1488 restmp = resdecl;
2ab2ce89 1489 add_local_decl (cfun, restmp);
28454517 1490 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1491 }
1492 else
1493 restmp = create_tmp_var_raw (restype, "retval");
1494 }
1495
1767a056 1496 for (arg = a; arg; arg = DECL_CHAIN (arg))
28454517 1497 nargs++;
1498 vargs = VEC_alloc (tree, heap, nargs);
1499 if (this_adjusting)
1500 VEC_quick_push (tree, vargs,
1501 thunk_adjust (&bsi,
1502 a, 1, fixed_offset,
1503 virtual_offset));
1504 else
1505 VEC_quick_push (tree, vargs, a);
1767a056 1506 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
28454517 1507 VEC_quick_push (tree, vargs, arg);
1508 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1509 VEC_free (tree, heap, vargs);
1510 gimple_call_set_cannot_inline (call, true);
1511 gimple_call_set_from_thunk (call, true);
1512 if (restmp)
1513 gimple_call_set_lhs (call, restmp);
1514 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1515 mark_symbols_for_renaming (call);
1516 find_referenced_vars_in (call);
1517 update_stmt (call);
1518
1519 if (restmp && !this_adjusting)
1520 {
57ab8ec3 1521 tree true_label = NULL_TREE;
28454517 1522
1523 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1524 {
1525 gimple stmt;
1526 /* If the return type is a pointer, we need to
1527 protect against NULL. We know there will be an
1528 adjustment, because that's why we're emitting a
1529 thunk. */
1530 then_bb = create_basic_block (NULL, (void *) 0, bb);
1531 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1532 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1533 remove_edge (single_succ_edge (bb));
1534 true_label = gimple_block_label (then_bb);
28454517 1535 stmt = gimple_build_cond (NE_EXPR, restmp,
385f3f36 1536 build_zero_cst (TREE_TYPE (restmp)),
28454517 1537 NULL_TREE, NULL_TREE);
1538 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1539 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1540 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1541 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1542 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1543 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1544 bsi = gsi_last_bb (then_bb);
1545 }
1546
1547 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1548 fixed_offset, virtual_offset);
1549 if (true_label)
1550 {
1551 gimple stmt;
1552 bsi = gsi_last_bb (else_bb);
385f3f36 1553 stmt = gimple_build_assign (restmp,
1554 build_zero_cst (TREE_TYPE (restmp)));
28454517 1555 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1556 bsi = gsi_last_bb (return_bb);
1557 }
1558 }
1559 else
1560 gimple_call_set_tail (call, true);
1561
1562 /* Build return value. */
1563 ret = gimple_build_return (restmp);
1564 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1565
1566 delete_unreachable_blocks ();
1567 update_ssa (TODO_update_ssa);
1568
28454517 1569 /* Since we want to emit the thunk, we explicitly mark its name as
1570 referenced. */
91bf9d9a 1571 node->thunk.thunk_p = false;
1572 cgraph_node_remove_callees (node);
28454517 1573 cgraph_add_new_function (thunk_fndecl, true);
1574 bitmap_obstack_release (NULL);
1575 }
1576 current_function_decl = NULL;
1577}
1578
91bf9d9a 1579
1580/* Assemble thunks asociated to NODE. */
1581
1582static void
1583assemble_thunks (struct cgraph_node *node)
1584{
1585 struct cgraph_edge *e;
1586 for (e = node->callers; e;)
1587 if (e->caller->thunk.thunk_p)
1588 {
1589 struct cgraph_node *thunk = e->caller;
1590
1591 e = e->next_caller;
1592 assemble_thunks (thunk);
1593 assemble_thunk (thunk);
1594 }
1595 else
1596 e = e->next_caller;
1597}
1598
ae01b312 1599/* Expand function specified by NODE. */
e6d2b2d8 1600
ae01b312 1601static void
d9d9733a 1602cgraph_expand_function (struct cgraph_node *node)
ae01b312 1603{
1604 tree decl = node->decl;
1605
b0cdf642 1606 /* We ought not to compile any inline clones. */
cc636d56 1607 gcc_assert (!node->global.inlined_to);
b0cdf642 1608
6329636b 1609 announce_function (decl);
09fc9532 1610 node->process = 0;
ed772161 1611 if (node->same_body)
1612 {
28454517 1613 struct cgraph_node *alias, *next;
ed772161 1614 bool saved_alias = node->alias;
28454517 1615 for (alias = node->same_body;
1616 alias && alias->next; alias = alias->next)
1617 ;
1618 /* Walk aliases in the order they were created; it is possible that
0a10fd82 1619 thunks refer to the aliases made earlier. */
28454517 1620 for (; alias; alias = next)
1621 {
1622 next = alias->previous;
1623 if (!alias->thunk.thunk_p)
1624 assemble_alias (alias->decl,
1625 DECL_ASSEMBLER_NAME (alias->thunk.alias));
28454517 1626 }
ed772161 1627 node->alias = saved_alias;
f7777314 1628 cgraph_process_new_functions ();
ed772161 1629 }
f7777314 1630
91bf9d9a 1631 assemble_thunks (node);
f7777314 1632 gcc_assert (node->lowered);
1633
1634 /* Generate RTL for the body of DECL. */
1635 tree_rest_of_compilation (decl);
1636
1637 /* Make sure that BE didn't give up on compiling. */
1638 gcc_assert (TREE_ASM_WRITTEN (decl));
1639 current_function_decl = NULL;
cc91b414 1640 gcc_assert (!cgraph_preserve_function_body_p (node));
1a1a827a 1641 cgraph_release_function_body (node);
1642 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1643 points to the dead function body. */
1644 cgraph_node_remove_callees (node);
e1be32b8 1645
1646 cgraph_function_flags_ready = true;
ae01b312 1647}
1648
b0cdf642 1649/* Set *REASON to E's inline_failed code; return true if E can be inlined. */
d7c6d889 1650
1651bool
326a9581 1652cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1653{
b0cdf642 1654 *reason = e->inline_failed;
1655 return !e->inline_failed;
d7c6d889 1656}
b0cdf642 1657
acc70efa 1658
acc70efa 1659
d9d9733a 1660/* Expand all functions that must be output.
1661
d7c6d889 1662   Attempt to topologically sort the nodes so that a function is output
1663   only after the functions it calls have been assembled, allowing data to
91c82c20 1664   be propagated across the callgraph.  Use a stack to get smaller distance
3927afe0 1665 between a function and its callees (later we may choose to use a more
d7c6d889 1666 sophisticated algorithm for function reordering; we will likely want
1667 to use subsections to make the output functions appear in top-down
1668 order). */
1669
1670static void
a6868229 1671cgraph_expand_all_functions (void)
d7c6d889 1672{
1673 struct cgraph_node *node;
4c36ffe6 1674 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1675 int order_pos, new_order_pos = 0;
d7c6d889 1676 int i;
1677
7771d558 1678 order_pos = ipa_reverse_postorder (order);
cc636d56 1679 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1680
7bd28bba 1681 /* The garbage collector may remove inline clones we eliminate during
b0cdf642 1682 optimization, so we must be sure not to reference them. */
1683 for (i = 0; i < order_pos; i++)
09fc9532 1684 if (order[i]->process)
b0cdf642 1685 order[new_order_pos++] = order[i];
1686
1687 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1688 {
1689 node = order[i];
09fc9532 1690 if (node->process)
d7c6d889 1691 {
cc636d56 1692 gcc_assert (node->reachable);
09fc9532 1693 node->process = 0;
d7c6d889 1694 cgraph_expand_function (node);
1695 }
1696 }
523c1122 1697 cgraph_process_new_functions ();
773c5ba7 1698
d7c6d889 1699 free (order);
773c5ba7 1700
d7c6d889 1701}
1702
56af936e 1703/* This is used to sort functions, variables, and asm statements by their cgraph order number. */
1704
0b09525f 1705enum cgraph_order_sort_kind
1706{
1707 ORDER_UNDEFINED = 0,
1708 ORDER_FUNCTION,
1709 ORDER_VAR,
1710 ORDER_ASM
1711};
1712
56af936e 1713struct cgraph_order_sort
1714{
0b09525f 1715 enum cgraph_order_sort_kind kind;
56af936e 1716 union
1717 {
1718 struct cgraph_node *f;
1d416bd7 1719 struct varpool_node *v;
56af936e 1720 struct cgraph_asm_node *a;
1721 } u;
1722};
1723
1724/* Output all functions, variables, and asm statements according to their
1725   order fields, which is the order in which they
1726 appeared in the file. This implements -fno-toplevel-reorder. In
1727 this mode we may output functions and variables which don't really
1728 need to be output. */
1729
1730static void
1731cgraph_output_in_order (void)
1732{
1733 int max;
56af936e 1734 struct cgraph_order_sort *nodes;
1735 int i;
1736 struct cgraph_node *pf;
1d416bd7 1737 struct varpool_node *pv;
56af936e 1738 struct cgraph_asm_node *pa;
1739
1740 max = cgraph_order;
3e1cde87 1741 nodes = XCNEWVEC (struct cgraph_order_sort, max);
56af936e 1742
1d416bd7 1743 varpool_analyze_pending_decls ();
56af936e 1744
1745 for (pf = cgraph_nodes; pf; pf = pf->next)
1746 {
91bf9d9a 1747 if (pf->process && !pf->thunk.thunk_p)
56af936e 1748 {
1749 i = pf->order;
1750 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1751 nodes[i].kind = ORDER_FUNCTION;
1752 nodes[i].u.f = pf;
1753 }
1754 }
1755
1d416bd7 1756 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
56af936e 1757 {
1758 i = pv->order;
1759 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1760 nodes[i].kind = ORDER_VAR;
1761 nodes[i].u.v = pv;
1762 }
1763
1764 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1765 {
1766 i = pa->order;
1767 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1768 nodes[i].kind = ORDER_ASM;
1769 nodes[i].u.a = pa;
1770 }
56af936e 1771
304e5318 1772 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1773 for (i = 0; i < max; ++i)
1774 {
1775 if (nodes[i].kind == ORDER_VAR)
1776 {
1777 varpool_mark_needed_node (nodes[i].u.v);
1778 }
1779 }
1780 varpool_empty_needed_queue ();
1781
91da0f1c 1782 for (i = 0; i < max; ++i)
1783 if (nodes[i].kind == ORDER_VAR)
1784 varpool_finalize_named_section_flags (nodes[i].u.v);
1785
56af936e 1786 for (i = 0; i < max; ++i)
1787 {
1788 switch (nodes[i].kind)
1789 {
1790 case ORDER_FUNCTION:
09fc9532 1791 nodes[i].u.f->process = 0;
56af936e 1792 cgraph_expand_function (nodes[i].u.f);
1793 break;
1794
1795 case ORDER_VAR:
1d416bd7 1796 varpool_assemble_decl (nodes[i].u.v);
56af936e 1797 break;
1798
1799 case ORDER_ASM:
1800 assemble_asm (nodes[i].u.a->asm_str);
1801 break;
1802
1803 case ORDER_UNDEFINED:
1804 break;
1805
1806 default:
1807 gcc_unreachable ();
1808 }
1809 }
4b4ea2db 1810
1811 cgraph_asm_nodes = NULL;
3e1cde87 1812 free (nodes);
56af936e 1813}
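
/* For example (an illustrative translation unit, not code from GCC), with
   -fno-toplevel-reorder the three toplevel entities below are assembled in
   exactly this textual order -- asm, variable, function -- because their
   order fields record the position at which each was seen:

     asm ("# toplevel marker");
     static int counter;
     int bump (void) { return ++counter; }

   With reordering allowed, the output order would instead be chosen by
   cgraph_expand_all_functions and the varpool.  */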
1814
b0cdf642 1815/* Return true when the function body of NODE still needs to be kept around
1816 for later re-use. */
1817bool
cc91b414 1818cgraph_preserve_function_body_p (struct cgraph_node *node)
b0cdf642 1819{
8d8c4c8d 1820 gcc_assert (cgraph_global_info_ready);
cc91b414 1821 gcc_assert (!node->same_body_alias);
1822
b0cdf642 1823 /* Look if there is any clone around. */
ccf4ab6b 1824 if (node->clones)
1825 return true;
b0cdf642 1826 return false;
1827}
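
/* Illustrative use of cgraph_preserve_function_body_p (a sketch mirroring
   cgraph_expand_function above, not additional code in this file): the
   body of NODE may be released only when the predicate returns false:

     if (!cgraph_preserve_function_body_p (node))
       cgraph_release_function_body (node);  */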
1828
77fce4cd 1829static void
1830ipa_passes (void)
1831{
87d4aa85 1832 set_cfun (NULL);
4b14adf9 1833 current_function_decl = NULL;
75a70cf9 1834 gimple_register_cfg_hooks ();
77fce4cd 1835 bitmap_obstack_initialize (NULL);
59dd4830 1836
c9036234 1837 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1838
59dd4830 1839 if (!in_lto_p)
7b2e8956 1840 {
1841 execute_ipa_pass_list (all_small_ipa_passes);
1842 if (seen_error ())
1843 return;
1844 }
9ed5b1f5 1845
7bfefa9d 1846 /* If pass_all_early_optimizations was not scheduled, the state of
1847 the cgraph will not be properly updated. Update it now. */
1848 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1849 cgraph_state = CGRAPH_STATE_IPA_SSA;
9ed5b1f5 1850
7bfefa9d 1851 if (!in_lto_p)
1852 {
1853 /* Generate coverage variables and constructors. */
1854 coverage_finish ();
1855
1856 /* Process new functions added. */
1857 set_cfun (NULL);
1858 current_function_decl = NULL;
1859 cgraph_process_new_functions ();
7bfefa9d 1860
c9036234 1861 execute_ipa_summary_passes
1862 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
8867b500 1863 }
23433d72 1864
1865 /* Some targets need to handle LTO assembler output specially. */
1866 if (flag_generate_lto)
1867 targetm.asm_out.lto_start ();
1868
7bfefa9d 1869 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1870
1871 if (!in_lto_p)
1872 ipa_write_summaries ();
1873
23433d72 1874 if (flag_generate_lto)
1875 targetm.asm_out.lto_end ();
1876
8867b500 1877 if (!flag_ltrans)
1878 execute_ipa_pass_list (all_regular_ipa_passes);
c9036234 1879 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
9ed5b1f5 1880
77fce4cd 1881 bitmap_obstack_release (NULL);
1882}
1883
34e5cced 1884
ae01b312 1885/* Perform simple optimizations based on callgraph. */
1886
7bfefa9d 1887void
d9d9733a 1888cgraph_optimize (void)
ae01b312 1889{
852f689e 1890 if (seen_error ())
cb2b5570 1891 return;
1892
b0cdf642 1893#ifdef ENABLE_CHECKING
1894 verify_cgraph ();
1895#endif
a861fe52 1896
c1dcd13c 1897 /* The front end may output common variables after the unit has been finalized.
1898 It is safe to deal with them here as they are always zero-initialized. */
1d416bd7 1899 varpool_analyze_pending_decls ();
e9f08e82 1900
f79b6507 1901 timevar_push (TV_CGRAPHOPT);
51949610 1902 if (pre_ipa_mem_report)
1903 {
1904 fprintf (stderr, "Memory consumption before IPA\n");
1905 dump_memory_report (false);
1906 }
d7c6d889 1907 if (!quiet_flag)
cd6bca02 1908 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1909 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1910
be4d0974 1911 /* Don't run the IPA passes if there were any errors or sorry messages. */
852f689e 1912 if (!seen_error ())
be4d0974 1913 ipa_passes ();
1914
34e5cced 1915 /* Do nothing else if any IPA pass found errors. */
852f689e 1916 if (seen_error ())
021c1c18 1917 {
1918 timevar_pop (TV_CGRAPHOPT);
1919 return;
1920 }
34e5cced 1921
e1be32b8 1922 /* This pass removes bodies of extern inline functions we never inlined.
1923 Do this later so that other IPA passes see what is really going on. */
1924 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1925 cgraph_global_info_ready = true;
f79b6507 1926 if (cgraph_dump_file)
1927 {
e4200070 1928 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1929 dump_cgraph (cgraph_dump_file);
c1dcd13c 1930 dump_varpool (cgraph_dump_file);
f79b6507 1931 }
51949610 1932 if (post_ipa_mem_report)
1933 {
defa2fa6 1934 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1935 dump_memory_report (false);
1936 }
f79b6507 1937 timevar_pop (TV_CGRAPHOPT);
ae01b312 1938
d7c6d889 1939 /* Output everything. */
47306a5d 1940 (*debug_hooks->assembly_start) ();
e4200070 1941 if (!quiet_flag)
1942 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1943#ifdef ENABLE_CHECKING
1944 verify_cgraph ();
1945#endif
56af936e 1946
ccf4ab6b 1947 cgraph_materialize_all_clones ();
acc70efa 1948 cgraph_mark_functions_to_output ();
c1dcd13c 1949
523c1122 1950 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1951 if (!flag_toplevel_reorder)
1952 cgraph_output_in_order ();
1953 else
1954 {
1955 cgraph_output_pending_asms ();
1956
1957 cgraph_expand_all_functions ();
1d416bd7 1958 varpool_remove_unreferenced_decls ();
56af936e 1959
1d416bd7 1960 varpool_assemble_pending_decls ();
56af936e 1961 }
523c1122 1962 cgraph_process_new_functions ();
1963 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1964
f79b6507 1965 if (cgraph_dump_file)
1966 {
e4200070 1967 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1968 dump_cgraph (cgraph_dump_file);
7410370b 1969 dump_varpool (cgraph_dump_file);
f79b6507 1970 }
b0cdf642 1971#ifdef ENABLE_CHECKING
1972 verify_cgraph ();
4ee9c684 1973 /* Double check that all inline clones are gone and that all
1974 function bodies have been released from memory. */
852f689e 1975 if (!seen_error ())
4ee9c684 1976 {
1977 struct cgraph_node *node;
1978 bool error_found = false;
1979
1980 for (node = cgraph_nodes; node; node = node->next)
1981 if (node->analyzed
1982 && (node->global.inlined_to
1a1a827a 1983 || gimple_has_body_p (node->decl)))
4ee9c684 1984 {
1985 error_found = true;
1986 dump_cgraph_node (stderr, node);
a0c938f0 1987 }
4ee9c684 1988 if (error_found)
c04e3894 1989 internal_error ("nodes with unreleased memory found");
4ee9c684 1990 }
b0cdf642 1991#endif
ae01b312 1992}
34e5cced 1993
121f3051 1994void
1995init_cgraph (void)
1996{
01ec0a6c 1997 if (!cgraph_dump_file)
1998 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 1999}
b5d36404 2000
a0c938f0 2001/* The edges representing the callers of the NEW_VERSION node were
b5d36404 2002 fixed by cgraph_function_versioning (); now the call statements in their
2003 respective bodies should be updated to call the NEW_VERSION. */
2004
2005static void
2006update_call_expr (struct cgraph_node *new_version)
2007{
2008 struct cgraph_edge *e;
2009
2010 gcc_assert (new_version);
75a70cf9 2011
2012 /* Update the call expr on the edges to call the new version. */
b5d36404 2013 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 2014 {
2015 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2016 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 2017 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 2018 }
b5d36404 2019}
2020
2021
2022/* Create a new cgraph node which is the new version of
2023   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
2024   edges which should be redirected to point to
2025   NEW_VERSION.  All the callee edges of OLD_VERSION
2026   are cloned to the new version node.  Return the new
b06ab5fa 2027 version node.
2028
2029   If non-NULL, BBS_TO_COPY determines which basic blocks
2030   are copied, to prevent duplication of calls that are dead
2031   in the clone.  */
b5d36404 2032
2033static struct cgraph_node *
2034cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 2035 tree new_decl,
b06ab5fa 2036 VEC(cgraph_edge_p,heap) *redirect_callers,
2037 bitmap bbs_to_copy)
2038 {
b5d36404 2039 struct cgraph_node *new_version;
32936803 2040 struct cgraph_edge *e;
b5d36404 2041 unsigned i;
2042
2043 gcc_assert (old_version);
a0c938f0 2044
5a90471f 2045 new_version = cgraph_create_node (new_decl);
b5d36404 2046
2047 new_version->analyzed = true;
2048 new_version->local = old_version->local;
a70a5e2c 2049 new_version->local.externally_visible = false;
2050 new_version->local.local = true;
b5d36404 2051 new_version->global = old_version->global;
a93f1c3b 2052 new_version->rtl = old_version->rtl;
b5d36404 2053 new_version->reachable = true;
2054 new_version->count = old_version->count;
2055
a70a5e2c 2056 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 2057 if (!bbs_to_copy
2058 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2059 cgraph_clone_edge (e, new_version, e->call_stmt,
2060 e->lto_stmt_uid, REG_BR_PROB_BASE,
2061 CGRAPH_FREQ_BASE,
0835ad03 2062 true);
a70a5e2c 2063 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 2064 if (!bbs_to_copy
2065 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2066 cgraph_clone_edge (e, new_version, e->call_stmt,
2067 e->lto_stmt_uid, REG_BR_PROB_BASE,
2068 CGRAPH_FREQ_BASE,
0835ad03 2069 true);
48148244 2070 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
4460a647 2071 {
2072 /* Redirect calls to the old version node to point to its new
2073 version. */
2074 cgraph_redirect_edge_callee (e, new_version);
2075 }
b5d36404 2076
2077 return new_version;
2078 }
2079
2080 /* Perform function versioning.
a0c938f0 2081 Function versioning includes copying of the tree and
b5d36404 2082 a callgraph update (creating a new cgraph node and updating
2083 its callees and callers).
2084
2085    REDIRECT_CALLERS is a vector of the edges to be redirected
2086    to the new version.
2087
2088 TREE_MAP is a mapping of tree nodes we want to replace with
2089 new ones (according to results of prior analysis).
2090 OLD_VERSION_NODE is the node that is versioned.
48e1416a 2091 It returns the new version's cgraph node.
b06ab5fa 2092    If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
2093    from the new version.
2094    If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
2095    If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.  */
b5d36404 2096
2097struct cgraph_node *
2098cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 2099 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 2100 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 2101 bitmap args_to_skip,
b06ab5fa 2102 bitmap bbs_to_copy,
2103 basic_block new_entry_block,
a70a5e2c 2104 const char *clone_name)
b5d36404 2105{
2106 tree old_decl = old_version_node->decl;
2107 struct cgraph_node *new_version_node = NULL;
2108 tree new_decl;
2109
2110 if (!tree_versionable_function_p (old_decl))
2111 return NULL;
2112
3c97c75d 2113 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2114
b5d36404 2115 /* Make a new FUNCTION_DECL tree node for the
2116 new version. */
5afe38fe 2117 if (!args_to_skip)
2118 new_decl = copy_node (old_decl);
2119 else
2120 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 2121
df0b8dfb 2122 /* Generate a new name for the new version. */
2123 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2124 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2125 SET_DECL_RTL (new_decl, NULL);
2126
b5d36404 2127 /* Create the new version's call-graph node.
2128 and update the edges of the new node. */
2129 new_version_node =
2130 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 2131 redirect_callers, bbs_to_copy);
b5d36404 2132
2133 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 2134 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2135 bbs_to_copy, new_entry_block);
b5d36404 2136
a0c938f0 2137 /* Update the new version's properties.
e03a95e7 2138     Make the new version visible only within this translation unit.  Make sure
2139     that it is not weak either.
a0c938f0 2140 ??? We cannot use COMDAT linkage because there is no
b5d36404 2141 ABI support for this. */
6137cc9f 2142 cgraph_make_decl_local (new_version_node->decl);
f014e39d 2143 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 2144 new_version_node->local.externally_visible = 0;
2145 new_version_node->local.local = 1;
2146 new_version_node->lowered = true;
f014e39d 2147
e03a95e7 2148 /* Update the call_expr on the edges to call the new version node. */
2149 update_call_expr (new_version_node);
48e1416a 2150
50828ed8 2151 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2152 return new_version_node;
2153}
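
/* A minimal usage sketch (illustrative only; "example_clone" is a made-up
   suffix): given NODE and a vector REDIRECT_CALLERS of caller edges, an
   IPA pass can request a clone that keeps every parameter and basic block
   by passing NULL for TREE_MAP, ARGS_TO_SKIP, BBS_TO_COPY and
   NEW_ENTRY_BLOCK:

     struct cgraph_node *clone
       = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
                                     NULL, NULL, "example_clone");

   The result is NULL when NODE's body is not versionable.  */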
469679ab 2154
ccf4ab6b 2155/* Given a virtual clone, turn it into an actual clone. */
2156static void
2157cgraph_materialize_clone (struct cgraph_node *node)
2158{
2159 bitmap_obstack_initialize (NULL);
e748b31d 2160 node->former_clone_of = node->clone_of->decl;
2161 if (node->clone_of->former_clone_of)
2162 node->former_clone_of = node->clone_of->former_clone_of;
ccf4ab6b 2163 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2164 tree_function_versioning (node->clone_of->decl, node->decl,
2165 node->clone.tree_map, true,
b06ab5fa 2166 node->clone.args_to_skip, NULL, NULL);
e20422ea 2167 if (cgraph_dump_file)
2168 {
2169 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2170 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2171 }
ccf4ab6b 2172
2173 /* The function is no longer a clone. */
2174 if (node->next_sibling_clone)
2175 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2176 if (node->prev_sibling_clone)
2177 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2178 else
2179 node->clone_of->clones = node->next_sibling_clone;
2180 node->next_sibling_clone = NULL;
2181 node->prev_sibling_clone = NULL;
6d1cc52c 2182 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2183 {
2184 cgraph_release_function_body (node->clone_of);
2185 cgraph_node_remove_callees (node->clone_of);
2186 ipa_remove_all_references (&node->clone_of->ref_list);
2187 }
ccf4ab6b 2188 node->clone_of = NULL;
2189 bitmap_obstack_release (NULL);
2190}
2191
c596d830 2192/* If necessary, change the function declaration in the call statement
2193 associated with E so that it corresponds to the edge callee. */
2194
2195gimple
2196cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2197{
2198 tree decl = gimple_call_fndecl (e->call_stmt);
2199 gimple new_stmt;
3fd0ca33 2200 gimple_stmt_iterator gsi;
2201 bool gsi_computed = false;
1f449108 2202#ifdef ENABLE_CHECKING
2203 struct cgraph_node *node;
2204#endif
c596d830 2205
1caef38b 2206 if (e->indirect_unknown_callee
2207 || decl == e->callee->decl
c596d830 2208      /* Don't update a call from a same-body alias to the real function. */
1caef38b 2209 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
c596d830 2210 return e->call_stmt;
2211
1f449108 2212#ifdef ENABLE_CHECKING
1caef38b 2213 if (decl)
2214 {
2215 node = cgraph_get_node (decl);
2216 gcc_assert (!node || !node->clone.combined_args_to_skip);
2217 }
1f449108 2218#endif
e748b31d 2219
c596d830 2220 if (cgraph_dump_file)
2221 {
2222 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2223 cgraph_node_name (e->caller), e->caller->uid,
2224 cgraph_node_name (e->callee), e->callee->uid);
2225 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2226 if (e->callee->clone.combined_args_to_skip)
91aba934 2227 {
2228 fprintf (cgraph_dump_file, " combined args to skip: ");
2229 dump_bitmap (cgraph_dump_file,
2230 e->callee->clone.combined_args_to_skip);
e748b31d 2231 }
c596d830 2232 }
2233
9bab6a70 2234  if (e->indirect_info
2235      && e->indirect_info->thunk_delta != 0
3fd0ca33 2236 && (!e->callee->clone.combined_args_to_skip
2237 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2238 {
2239 if (cgraph_dump_file)
9bab6a70 2240 fprintf (cgraph_dump_file, " Thunk delta is "
2241 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
3fd0ca33 2242 gsi = gsi_for_stmt (e->call_stmt);
2243 gsi_computed = true;
9bab6a70 2244 gimple_adjust_this_by_delta (&gsi,
2245 build_int_cst (sizetype,
2246 e->indirect_info->thunk_delta));
2247 e->indirect_info->thunk_delta = 0;
3fd0ca33 2248 }
2249
c596d830 2250 if (e->callee->clone.combined_args_to_skip)
91aba934 2251 {
092cd838 2252 int lp_nr;
91aba934 2253
2254 new_stmt
2255 = gimple_call_copy_skip_args (e->call_stmt,
2256 e->callee->clone.combined_args_to_skip);
75c7f5a5 2257 gimple_call_set_fndecl (new_stmt, e->callee->decl);
91aba934 2258
2259 if (gimple_vdef (new_stmt)
2260 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2261 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2262
3fd0ca33 2263 if (!gsi_computed)
2264 gsi = gsi_for_stmt (e->call_stmt);
9126b675 2265 gsi_replace (&gsi, new_stmt, false);
092cd838 2266 /* We need to defer cleaning EH info on the new statement to
2267 fixup-cfg. We may not have dominator information at this point
2268 and thus would end up with unreachable blocks and have no way
2269 to communicate that we need to run CFG cleanup then. */
2270 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2271 if (lp_nr != 0)
2272 {
2273 remove_stmt_from_eh_lp (e->call_stmt);
2274 add_stmt_to_eh_lp (new_stmt, lp_nr);
2275 }
91aba934 2276 }
c596d830 2277 else
75c7f5a5 2278 {
2279 new_stmt = e->call_stmt;
2280 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2281 update_stmt (new_stmt);
2282 }
c596d830 2283
c596d830 2284 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2285
2286 if (cgraph_dump_file)
2287 {
2288 fprintf (cgraph_dump_file, " updated to:");
2289 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2290 }
2291 return new_stmt;
2292}
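
/* Usage sketch (illustrative, not code from this file): after redirecting
   a call edge E to a different callee, a pass brings the IL back in sync
   simply with

     gimple new_call = cgraph_redirect_edge_call_stmt_to_callee (e);

   and continues with NEW_CALL; as the code above shows, the statement is
   also propagated to all clones via cgraph_set_call_stmt_including_clones.  */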
2293
ccf4ab6b 2294/* Once all functions from the compilation unit are in memory, produce all
c596d830 2295   clones and update all calls.  We might also do this on demand if we don't
2296   want to bring all functions to memory prior to compilation, but the current
2297   WHOPR implementation does that and it is a bit easier to keep everything
2298   right in this order.  */
ccf4ab6b 2299void
2300cgraph_materialize_all_clones (void)
2301{
2302 struct cgraph_node *node;
2303 bool stabilized = false;
2304
2305 if (cgraph_dump_file)
2306 fprintf (cgraph_dump_file, "Materializing clones\n");
2307#ifdef ENABLE_CHECKING
2308 verify_cgraph ();
2309#endif
2310
2311  /* We could also use a topological order, but the number of iterations should
2312     be bounded by the number of IPA passes, since a single IPA pass is unlikely
2313     to create clones of clones it created itself. */
2314 while (!stabilized)
2315 {
2316 stabilized = true;
2317 for (node = cgraph_nodes; node; node = node->next)
2318 {
2319 if (node->clone_of && node->decl != node->clone_of->decl
2320 && !gimple_has_body_p (node->decl))
2321 {
2322 if (gimple_has_body_p (node->clone_of->decl))
2323 {
2324 if (cgraph_dump_file)
e20422ea 2325 {
0a10fd82 2326 fprintf (cgraph_dump_file, "cloning %s to %s\n",
e20422ea 2327 cgraph_node_name (node->clone_of),
2328 cgraph_node_name (node));
2329 if (node->clone.tree_map)
2330 {
2331 unsigned int i;
2332 fprintf (cgraph_dump_file, " replace map: ");
2333 for (i = 0; i < VEC_length (ipa_replace_map_p,
2334 node->clone.tree_map);
2335 i++)
2336 {
2337 struct ipa_replace_map *replace_info;
2338 replace_info = VEC_index (ipa_replace_map_p,
2339 node->clone.tree_map,
2340 i);
2341 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2342 fprintf (cgraph_dump_file, " -> ");
2343 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2344 fprintf (cgraph_dump_file, "%s%s;",
2345 replace_info->replace_p ? "(replace)":"",
2346 replace_info->ref_p ? "(ref)":"");
2347 }
2348 fprintf (cgraph_dump_file, "\n");
2349 }
2350 if (node->clone.args_to_skip)
2351 {
2352 fprintf (cgraph_dump_file, " args_to_skip: ");
2353 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2354 }
2355 if (node->clone.args_to_skip)
2356 {
2357 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2358 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2359 }
2360 }
ccf4ab6b 2361 cgraph_materialize_clone (node);
a510bd8d 2362 stabilized = false;
ccf4ab6b 2363 }
ccf4ab6b 2364 }
2365 }
2366 }
ee3f5fc0 2367 for (node = cgraph_nodes; node; node = node->next)
2368 if (!node->analyzed && node->callees)
2369 cgraph_node_remove_callees (node);
c596d830 2370 if (cgraph_dump_file)
2371 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2372#ifdef ENABLE_CHECKING
2373 verify_cgraph ();
2374#endif
ccf4ab6b 2375 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2376}
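
/* Illustrative sketch (an assumption about the surrounding API, not code
   from this file): the virtual clones handled above are typically created
   earlier by IPA passes, e.g. via cgraph_create_virtual_clone, which only
   records clone.tree_map and clone.args_to_skip; the loop above is what
   finally copies the body:

     struct cgraph_node *vclone
       = cgraph_create_virtual_clone (node, redirect_callers,
                                      tree_map, args_to_skip, "constprop");  */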
2377
a861fe52 2378#include "gt-cgraphunit.h"