cd6bca02 1/* Callgraph based interprocedural optimizations.
aed6e608 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
   In the call-graph construction and local function
b0cdf642 49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
   callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
6329636b 80 Analyzing of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
   that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
   really have been referenced by produced code, so we catch more cases
6329636b 106 where reference has been optimized out. */
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
bfec3452 146static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 147
ecb08119 148FILE *cgraph_dump_file;
121f3051 149
28454517 150/* Used for vtable lookup in thunk adjusting. */
151static GTY (()) tree vtable_entry_type;
152
2c0b522d 153/* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit, something magic in the system
6329636b 155 configury. */
2c0b522d 156
7bfefa9d 157bool
158cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
2c0b522d 159{
3f82b628 160 /* If the user told us it is used, then it must be so. */
05806473 161 if (node->local.externally_visible)
162 return true;
163
3f82b628 164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
169 return true;
170
55680bef 171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
316ef6d8 175 && !DECL_EXTERNAL (decl)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
55680bef 177 return true;
178
2c0b522d 179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
182 if (node->needed)
183 return true;
184
185 /* Externally visible functions must be output. The exception is
a0c938f0 186 COMDAT functions that must be output only when they are needed.
8baa9d15 187
188 When not optimizing, also output the static functions. (see
95da6220 189 PR24561), but don't do so for always_inline functions, functions
0f9238c0 190 declared inline and nested functions. These were optimized out
d3d410e1 191 in the original implementation and it is unclear whether we want
554f2707 192 to change the behavior here. */
bba7ddf8 193 if (((TREE_PUBLIC (decl)
0f9238c0 194 || (!optimize
195 && !node->local.disregard_inline_limits
d3d410e1 196 && !DECL_DECLARED_INLINE_P (decl)
0f9238c0 197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
59dd4830 199 && !flag_whole_program
cbcf2791 200 && !flag_lto)
62eec3b4 201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
2c0b522d 202 return true;
203
2c0b522d 204 return false;
205}
206
bdc40eb8 207/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
523c1122 208 functions into callgraph in a way so they look like ordinary reachable
209 functions inserted into callgraph already at construction time. */
210
211bool
212cgraph_process_new_functions (void)
213{
214 bool output = false;
215 tree fndecl;
216 struct cgraph_node *node;
217
0cddb138 218 varpool_analyze_pending_decls ();
523c1122 219 /* Note that this queue may grow as its being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
222 {
223 node = cgraph_new_nodes;
224 fndecl = node->decl;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
226 switch (cgraph_state)
227 {
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize function and move
230 it into reachable functions list. */
231
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
235 output = true;
236 break;
237
238 case CGRAPH_STATE_IPA:
f517b36e 239 case CGRAPH_STATE_IPA_SSA:
523c1122 240 /* When IPA optimization already started, do all essential
241 transformations that has been already performed on the whole
242 cgraph but not on this function. */
243
75a70cf9 244 gimple_register_cfg_hooks ();
523c1122 245 if (!node->analyzed)
246 cgraph_analyze_function (node);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
248 current_function_decl = fndecl;
9c1bff7a 249 compute_inline_parameters (node);
f517b36e 250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
253 to expand OMP. */
254 || !optimize)
20099e35 255 execute_pass_list (pass_early_local_passes.pass.sub);
523c1122 256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
258 pop_cfun ();
259 current_function_decl = NULL;
260 break;
261
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
264 directly. */
09fc9532 265 node->process = 0;
523c1122 266 cgraph_expand_function (node);
267 break;
268
269 default:
270 gcc_unreachable ();
271 break;
272 }
50828ed8 273 cgraph_call_function_insertion_hooks (node);
0cddb138 274 varpool_analyze_pending_decls ();
523c1122 275 }
276 return output;
277}
278
9b8fb23a 279/* As an GCC extension we allow redefinition of the function. The
280 semantics when both copies of bodies differ is not well defined.
281 We replace the old body with new body so in unit at a time mode
282 we always use new body, while in normal mode we may end up with
283 old body inlined into some functions and new body expanded and
284 inlined in others.
285
286 ??? It may make more sense to use one body for inlining and other
287 body for expanding the function but this is difficult to do. */
288
289static void
290cgraph_reset_node (struct cgraph_node *node)
291{
09fc9532 292 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
09fc9532 297 gcc_assert (!node->process);
9b8fb23a 298
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
306
9b8fb23a 307 cgraph_node_remove_callees (node);
308
309 /* We may need to re-queue the node for assembling in case
46beef9a 310 we already proceeded it and ignored as not needed or got
311 a re-declaration in IMA mode. */
312 if (node->reachable)
9b8fb23a 313 {
314 struct cgraph_node *n;
315
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
317 if (n == node)
318 break;
319 if (!n)
320 node->reachable = 0;
321 }
322}
c08871a9 323
1e8e9920 324static void
325cgraph_lower_function (struct cgraph_node *node)
326{
327 if (node->lowered)
328 return;
bfec3452 329
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
333
1e8e9920 334 tree_lowering_passes (node->decl);
335 node->lowered = true;
336}
337
28df663b 338/* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
ae01b312 342
343void
28df663b 344cgraph_finalize_function (tree decl, bool nested)
ae01b312 345{
346 struct cgraph_node *node = cgraph_node (decl);
347
c08871a9 348 if (node->local.finalized)
9b8fb23a 349 cgraph_reset_node (node);
28df663b 350
167b550b 351 node->pid = cgraph_max_pid ++;
c08871a9 352 notice_global_symbol (decl);
79bb87b4 353 node->local.finalized = true;
e27482aa 354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
22c748a1 355 node->finalized_by_frontend = true;
ae01b312 356
7bfefa9d 357 if (cgraph_decide_is_function_needed (node, decl))
2c0b522d 358 cgraph_mark_needed_node (node);
359
ecda6e51 360 /* Since we reclaim unreachable nodes at the end of every language
3f82b628 361 level unit, we need to be conservative about possible entry points
362 there. */
1e3aebec 363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
d050bafd 365 || DECL_STATIC_DESTRUCTOR (decl)
366 /* COMDAT virtual functions may be referenced by vtable from
367 other compilatoin unit. Still we want to devirtualize calls
368 to those so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
3f82b628 372 cgraph_mark_reachable_node (node);
373
2c0b522d 374 /* If we've not yet emitted decl, tell the debug info about it. */
28df663b 375 if (!TREE_ASM_WRITTEN (decl))
2c0b522d 376 (*debug_hooks->deferred_inline_function) (decl);
4e8871a0 377
b69eb0ff 378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
6329636b 381
382 if (!nested)
383 ggc_collect ();
ae01b312 384}
385
0da03d11 386/* C99 extern inline keywords allow changing of declaration after function
387 has been finalized. We need to re-decide if we want to mark the function as
388 needed then. */
389
390void
391cgraph_mark_if_needed (tree decl)
392{
393 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 395 cgraph_mark_needed_node (node);
396}
397
ccf4ab6b 398/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
399static bool
400clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
401{
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
405}
406
1a036a3b 407/* Verify edge E count and frequency. */
408
409static bool
410verify_edge_count_and_frequency (struct cgraph_edge *e)
411{
412 bool error_found = false;
413 if (e->count < 0)
414 {
415 error ("caller edge count is negative");
416 error_found = true;
417 }
418 if (e->frequency < 0)
419 {
420 error ("caller edge frequency is negative");
421 error_found = true;
422 }
423 if (e->frequency > CGRAPH_FREQ_MAX)
424 {
425 error ("caller edge frequency is too large");
426 error_found = true;
427 }
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
434 error ("caller edge frequency %i does not match BB freqency %i",
435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441}
442
7b29dd2f 443/* Switch to THIS_CFUN if needed and print STMT to stderr. */
444static void
445cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
446{
447 /* debug_gimple_stmt needs correct cfun */
448 if (cfun != this_cfun)
449 set_cfun (this_cfun);
450 debug_gimple_stmt (stmt);
451}
452
b0cdf642 453/* Verify cgraph nodes of given cgraph node. */
4b987fac 454DEBUG_FUNCTION void
b0cdf642 455verify_cgraph_node (struct cgraph_node *node)
456{
457 struct cgraph_edge *e;
e27482aa 458 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
459 basic_block this_block;
75a70cf9 460 gimple_stmt_iterator gsi;
9bfec7c2 461 bool error_found = false;
b0cdf642 462
852f689e 463 if (seen_error ())
bd09cd3e 464 return;
465
b0cdf642 466 timevar_push (TV_CGRAPH_VERIFY);
b0cdf642 467 for (e = node->callees; e; e = e->next_callee)
468 if (e->aux)
469 {
0a81f5a0 470 error ("aux field set for edge %s->%s",
abd3e6b5 471 identifier_to_locale (cgraph_node_name (e->caller)),
472 identifier_to_locale (cgraph_node_name (e->callee)));
b0cdf642 473 error_found = true;
474 }
a2cb9b3b 475 if (node->count < 0)
476 {
bf776685 477 error ("execution count is negative");
a2cb9b3b 478 error_found = true;
479 }
59dd4830 480 if (node->global.inlined_to && node->local.externally_visible)
481 {
bf776685 482 error ("externally visible inline clone");
59dd4830 483 error_found = true;
484 }
485 if (node->global.inlined_to && node->address_taken)
486 {
bf776685 487 error ("inline clone with address taken");
59dd4830 488 error_found = true;
489 }
490 if (node->global.inlined_to && node->needed)
491 {
bf776685 492 error ("inline clone is needed");
59dd4830 493 error_found = true;
494 }
799c8711 495 for (e = node->indirect_calls; e; e = e->next_callee)
496 {
497 if (e->aux)
498 {
499 error ("aux field set for indirect edge from %s",
500 identifier_to_locale (cgraph_node_name (e->caller)));
501 error_found = true;
502 }
503 if (!e->indirect_unknown_callee
504 || !e->indirect_info)
505 {
506 error ("An indirect edge from %s is not marked as indirect or has "
507 "associated indirect_info, the corresponding statement is: ",
508 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 509 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 510 error_found = true;
511 }
512 }
b0cdf642 513 for (e = node->callers; e; e = e->next_caller)
514 {
1a036a3b 515 if (verify_edge_count_and_frequency (e))
516 error_found = true;
b0cdf642 517 if (!e->inline_failed)
518 {
519 if (node->global.inlined_to
520 != (e->caller->global.inlined_to
521 ? e->caller->global.inlined_to : e->caller))
522 {
0a81f5a0 523 error ("inlined_to pointer is wrong");
b0cdf642 524 error_found = true;
525 }
526 if (node->callers->next_caller)
527 {
0a81f5a0 528 error ("multiple inline callers");
b0cdf642 529 error_found = true;
530 }
531 }
532 else
533 if (node->global.inlined_to)
534 {
0a81f5a0 535 error ("inlined_to pointer set for noninline callers");
b0cdf642 536 error_found = true;
537 }
538 }
1a036a3b 539 for (e = node->indirect_calls; e; e = e->next_callee)
540 if (verify_edge_count_and_frequency (e))
541 error_found = true;
b0cdf642 542 if (!node->callers && node->global.inlined_to)
543 {
5cd75817 544 error ("inlined_to pointer is set but no predecessors found");
b0cdf642 545 error_found = true;
546 }
547 if (node->global.inlined_to == node)
548 {
0a81f5a0 549 error ("inlined_to pointer refers to itself");
b0cdf642 550 error_found = true;
551 }
552
0f6439b9 553 if (!cgraph_node (node->decl))
b0cdf642 554 {
0f6439b9 555 error ("node not found in cgraph_hash");
b0cdf642 556 error_found = true;
557 }
a0c938f0 558
ccf4ab6b 559 if (node->clone_of)
560 {
561 struct cgraph_node *n;
562 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
563 if (n == node)
564 break;
565 if (!n)
566 {
567 error ("node has wrong clone_of");
568 error_found = true;
569 }
570 }
571 if (node->clones)
572 {
573 struct cgraph_node *n;
574 for (n = node->clones; n; n = n->next_sibling_clone)
575 if (n->clone_of != node)
576 break;
577 if (n)
578 {
579 error ("node has wrong clone list");
580 error_found = true;
581 }
582 }
583 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
584 {
585 error ("node is in clone list but it is not clone");
586 error_found = true;
587 }
588 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
589 {
590 error ("node has wrong prev_clone pointer");
591 error_found = true;
592 }
593 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
594 {
595 error ("double linked list of clones corrupted");
596 error_found = true;
597 }
c524ac5d 598 if (node->same_comdat_group)
599 {
600 struct cgraph_node *n = node->same_comdat_group;
601
602 if (!DECL_ONE_ONLY (node->decl))
603 {
604 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
605 error_found = true;
606 }
607 if (n == node)
608 {
609 error ("node is alone in a comdat group");
610 error_found = true;
611 }
612 do
613 {
614 if (!n->same_comdat_group)
615 {
616 error ("same_comdat_group is not a circular list");
617 error_found = true;
618 break;
619 }
620 n = n->same_comdat_group;
621 }
622 while (n != node);
623 }
ccf4ab6b 624
625 if (node->analyzed && gimple_has_body_p (node->decl)
75a70cf9 626 && !TREE_ASM_WRITTEN (node->decl)
7bfefa9d 627 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
628 && !flag_wpa)
b0cdf642 629 {
e27482aa 630 if (this_cfun->cfg)
631 {
632 /* The nodes we're interested in are never shared, so walk
633 the tree ignoring duplicates. */
e7c352d1 634 struct pointer_set_t *visited_nodes = pointer_set_create ();
e27482aa 635 /* Reach the trees by walking over the CFG, and note the
636 enclosing basic-blocks in the call edges. */
637 FOR_EACH_BB_FN (this_block, this_cfun)
75a70cf9 638 for (gsi = gsi_start_bb (this_block);
639 !gsi_end_p (gsi);
640 gsi_next (&gsi))
9bfec7c2 641 {
75a70cf9 642 gimple stmt = gsi_stmt (gsi);
799c8711 643 if (is_gimple_call (stmt))
9bfec7c2 644 {
645 struct cgraph_edge *e = cgraph_edge (node, stmt);
799c8711 646 tree decl = gimple_call_fndecl (stmt);
9bfec7c2 647 if (e)
648 {
649 if (e->aux)
650 {
0a81f5a0 651 error ("shared call_stmt:");
7b29dd2f 652 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 653 error_found = true;
654 }
799c8711 655 if (!e->indirect_unknown_callee)
28454517 656 {
76f3f3ab 657 struct cgraph_node *n;
658
799c8711 659 if (e->callee->same_body_alias)
660 {
661 error ("edge points to same body alias:");
662 debug_tree (e->callee->decl);
663 error_found = true;
664 }
e748b31d 665 else if (!e->callee->global.inlined_to
799c8711 666 && decl
e748b31d 667 && cgraph_get_node (decl)
668 && (e->callee->former_clone_of
669 != cgraph_get_node (decl)->decl)
799c8711 670 && !clone_of_p (cgraph_node (decl),
671 e->callee))
672 {
673 error ("edge points to wrong declaration:");
674 debug_tree (e->callee->decl);
675 fprintf (stderr," Instead of:");
676 debug_tree (decl);
677 error_found = true;
678 }
76f3f3ab 679 else if (decl
680 && (n = cgraph_get_node_or_alias (decl))
681 && (n->same_body_alias
682 && n->thunk.thunk_p))
683 {
684 error ("a call to thunk improperly represented "
685 "in the call graph:");
7b29dd2f 686 cgraph_debug_gimple_stmt (this_cfun, stmt);
687 error_found = true;
76f3f3ab 688 }
28454517 689 }
799c8711 690 else if (decl)
9bfec7c2 691 {
799c8711 692 error ("an indirect edge with unknown callee "
693 "corresponding to a call_stmt with "
694 "a known declaration:");
ee3f5fc0 695 error_found = true;
7b29dd2f 696 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
9bfec7c2 697 }
698 e->aux = (void *)1;
699 }
799c8711 700 else if (decl)
9bfec7c2 701 {
0a81f5a0 702 error ("missing callgraph edge for call stmt:");
7b29dd2f 703 cgraph_debug_gimple_stmt (this_cfun, stmt);
9bfec7c2 704 error_found = true;
705 }
706 }
707 }
e27482aa 708 pointer_set_destroy (visited_nodes);
e27482aa 709 }
710 else
711 /* No CFG available?! */
712 gcc_unreachable ();
713
b0cdf642 714 for (e = node->callees; e; e = e->next_callee)
715 {
799c8711 716 if (!e->aux)
b0cdf642 717 {
0a81f5a0 718 error ("edge %s->%s has no corresponding call_stmt",
abd3e6b5 719 identifier_to_locale (cgraph_node_name (e->caller)),
720 identifier_to_locale (cgraph_node_name (e->callee)));
7b29dd2f 721 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
b0cdf642 722 error_found = true;
723 }
724 e->aux = 0;
725 }
799c8711 726 for (e = node->indirect_calls; e; e = e->next_callee)
727 {
728 if (!e->aux)
729 {
730 error ("an indirect edge from %s has no corresponding call_stmt",
731 identifier_to_locale (cgraph_node_name (e->caller)));
7b29dd2f 732 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
799c8711 733 error_found = true;
734 }
735 e->aux = 0;
736 }
b0cdf642 737 }
738 if (error_found)
739 {
740 dump_cgraph_node (stderr, node);
0a81f5a0 741 internal_error ("verify_cgraph_node failed");
b0cdf642 742 }
743 timevar_pop (TV_CGRAPH_VERIFY);
744}
745
746/* Verify whole cgraph structure. */
4b987fac 747DEBUG_FUNCTION void
b0cdf642 748verify_cgraph (void)
749{
750 struct cgraph_node *node;
751
852f689e 752 if (seen_error ())
8ec2a798 753 return;
754
b0cdf642 755 for (node = cgraph_nodes; node; node = node->next)
756 verify_cgraph_node (node);
757}
758
56af936e 759/* Output all asm statements we have stored up to be output. */
760
761static void
762cgraph_output_pending_asms (void)
763{
764 struct cgraph_asm_node *can;
765
852f689e 766 if (seen_error ())
56af936e 767 return;
768
769 for (can = cgraph_asm_nodes; can; can = can->next)
770 assemble_asm (can->asm_str);
771 cgraph_asm_nodes = NULL;
772}
773
0785e435 774/* Analyze the function scheduled to be output. */
bfec3452 775static void
0785e435 776cgraph_analyze_function (struct cgraph_node *node)
777{
bfec3452 778 tree save = current_function_decl;
0785e435 779 tree decl = node->decl;
780
ec1e35b2 781 current_function_decl = decl;
e27482aa 782 push_cfun (DECL_STRUCT_FUNCTION (decl));
bfec3452 783
6816d0c4 784 assign_assembler_name_if_neeeded (node->decl);
785
bfec3452 786 /* Make sure to gimplify bodies only once. During analyzing a
787 function we lower it, which will require gimplified nested
788 functions, so we can end up here with an already gimplified
789 body. */
790 if (!gimple_body (decl))
791 gimplify_function_tree (decl);
792 dump_function (TDI_generic, decl);
793
e27482aa 794 cgraph_lower_function (node);
6e8d6e86 795 node->analyzed = true;
0785e435 796
e27482aa 797 pop_cfun ();
bfec3452 798 current_function_decl = save;
0785e435 799}
800
d05db70d 801/* Process attributes common for vars and functions. */
802
803static void
804process_common_attributes (tree decl)
805{
806 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
807
808 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
809 {
810 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
811 "%<weakref%> attribute should be accompanied with"
812 " an %<alias%> attribute");
813 DECL_WEAK (decl) = 0;
40b32d93 814 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
815 DECL_ATTRIBUTES (decl));
d05db70d 816 }
817}
818
05806473 819/* Look for externally_visible and used attributes and mark cgraph nodes
820 accordingly.
821
822 We cannot mark the nodes at the point the attributes are processed (in
823 handle_*_attribute) because the copy of the declarations available at that
824 point may not be canonical. For example, in:
825
826 void f();
827 void f() __attribute__((used));
828
829 the declaration we see in handle_used_attribute will be the second
830 declaration -- but the front end will subsequently merge that declaration
831 with the original declaration and discard the second declaration.
832
833 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
834
835 void f() {}
836 void f() __attribute__((externally_visible));
837
838 is valid.
839
840 So, we walk the nodes at the end of the translation unit, applying the
841 attributes at that point. */
842
843static void
844process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 845 struct varpool_node *first_var)
05806473 846{
847 struct cgraph_node *node;
1d416bd7 848 struct varpool_node *vnode;
05806473 849
850 for (node = cgraph_nodes; node != first; node = node->next)
851 {
852 tree decl = node->decl;
83a23b05 853 if (DECL_PRESERVE_P (decl))
0b49f8f8 854 cgraph_mark_needed_node (node);
62433d51 855 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
856 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
857 && TREE_PUBLIC (node->decl))
858 {
859 if (node->local.finalized)
860 cgraph_mark_needed_node (node);
861 }
862 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 863 {
ba12ea31 864 if (! TREE_PUBLIC (node->decl))
712d2297 865 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
866 "%<externally_visible%>"
867 " attribute have effect only on public objects");
59dd4830 868 else if (node->local.finalized)
869 cgraph_mark_needed_node (node);
05806473 870 }
40b32d93 871 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
872 && node->local.finalized)
873 {
874 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
875 "%<weakref%> attribute ignored"
876 " because function is defined");
877 DECL_WEAK (decl) = 0;
878 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
879 DECL_ATTRIBUTES (decl));
880 }
d05db70d 881 process_common_attributes (decl);
05806473 882 }
1d416bd7 883 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 884 {
885 tree decl = vnode->decl;
83a23b05 886 if (DECL_PRESERVE_P (decl))
05806473 887 {
22671757 888 vnode->force_output = true;
05806473 889 if (vnode->finalized)
1d416bd7 890 varpool_mark_needed_node (vnode);
05806473 891 }
62433d51 892 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
893 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 894 && TREE_PUBLIC (vnode->decl))
62433d51 895 {
896 if (vnode->finalized)
897 varpool_mark_needed_node (vnode);
898 }
899 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 900 {
ba12ea31 901 if (! TREE_PUBLIC (vnode->decl))
712d2297 902 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
903 "%<externally_visible%>"
904 " attribute have effect only on public objects");
59dd4830 905 else if (vnode->finalized)
906 varpool_mark_needed_node (vnode);
05806473 907 }
40b32d93 908 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
909 && vnode->finalized
910 && DECL_INITIAL (decl))
911 {
912 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
913 "%<weakref%> attribute ignored"
914 " because variable is initialized");
915 DECL_WEAK (decl) = 0;
916 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
917 DECL_ATTRIBUTES (decl));
918 }
d05db70d 919 process_common_attributes (decl);
05806473 920 }
921}
922
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  /* Handle attributes (used, externally_visible, weakref, ...) on the
     nodes added since the last invocation only.  */
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop one node off the needed queue.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Everything this node calls becomes reachable.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* Members of a comdat group are kept or dropped together.  */
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body.  The DECL_INITIAL() of that
	 abstract function declaration will be later needed to output debug
	 info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove bodies (and whole nodes) of functions that turned out to be
     unreachable; only the nodes added since the previous call are
     inspected.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
1055
8f69fd82 1056
/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  The second call picks up whatever the
     alias processing above made newly reachable.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
9ed5b1f5 1094
1095
/* Figure out what functions we want to assemble.  Sets NODE->process on
   every node whose body must be emitted; verifies (when checking is
   enabled) that everything else was already reclaimed.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for processing before we start.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* A node already marked here can only have been marked via the
	 same_comdat_group walk below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* Look for at least one caller that did not inline this call.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one comdat group member forces the whole group out.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Verify the group's consistency after the full scan.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
1192
28454517 1193/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1194 in lowered gimple form.
1195
1196 Set current_function_decl and cfun to newly constructed empty function body.
1197 return basic block in the function body. */
1198
1199static basic_block
1200init_lowered_empty_function (tree decl)
1201{
1202 basic_block bb;
1203
1204 current_function_decl = decl;
1205 allocate_struct_function (decl, false);
1206 gimple_register_cfg_hooks ();
1207 init_empty_tree_cfg ();
1208 init_tree_ssa (cfun);
1209 init_ssa_operands ();
1210 cfun->gimple_df->in_ssa_p = true;
1211 DECL_INITIAL (decl) = make_node (BLOCK);
1212
1213 DECL_SAVED_TREE (decl) = error_mark_node;
1214 cfun->curr_properties |=
1215 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1216 PROP_ssa);
1217
1218 /* Create BB for body of the function and connect it properly. */
1219 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1220 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1221 make_edge (bb, EXIT_BLOCK_PTR, 0);
1222
1223 return bb;
1224}
1225
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  The adjustment statements are
   inserted after *BSI; returns a temporary holding the adjusted
   pointer.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a `this' adjusting thunk the constant offset is applied before
     the virtual lookup ...  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the type of a vtable entry (pointer to a function
	 returning int with unspecified arguments).  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  /* ... whereas for a result adjusting thunk the constant offset is
     applied after the virtual lookup.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1352
/* Produce assembler for thunk NODE.  Either the target emits it directly
   (can_output_mi_thunk) or we synthesize a GIMPLE body that forwards to
   the thunk's alias.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Ensure thunks are emitted in their correct sections.  */
  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

  /* Fast path: the target can emit the thunk itself as asm.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: build a GIMPLE body that adjusts the pointer(s) and
	 tail-calls ALIAS, then hand it to the regular pipeline.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') may need adjusting; the rest are
	 forwarded unchanged.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					build_zero_cst (TREE_TYPE (restmp)),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* On the NULL branch the result stays a null pointer.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1532
/* Expand function specified by NODE.  Emits any same-body aliases and
   thunks first, generates RTL for the body, then releases the body and
   callee edges so no stale GIMPLE is referenced.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Seek to the end of the same_body list first ...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* Walk aliases in the order they were created; it is possible that
	 thunks refers to the aliases made earlier.  */
      for (; alias; alias = next)
        {
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
      cgraph_process_new_functions ();
    }

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1583
b0cdf642 1584/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1585
1586bool
326a9581 1587cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1588{
b0cdf642 1589 *reason = e->inline_failed;
1590 return !e->inline_failed;
d7c6d889 1591}
b0cdf642 1592
acc70efa 1593
acc70efa 1594
d9d9733a 1595/* Expand all functions that must be output.
1596
d7c6d889 1597 Attempt to topologically sort the nodes so function is output when
1598 all called functions are already assembled to allow data to be
91c82c20 1599 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1600 between a function and its callees (later we may choose to use a more
d7c6d889 1601 sophisticated algorithm for function reordering; we will likely want
1602 to use subsections to make the output functions appear in top-down
1603 order). */
1604
1605static void
a6868229 1606cgraph_expand_all_functions (void)
d7c6d889 1607{
1608 struct cgraph_node *node;
4c36ffe6 1609 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1610 int order_pos, new_order_pos = 0;
d7c6d889 1611 int i;
1612
d7c6d889 1613 order_pos = cgraph_postorder (order);
cc636d56 1614 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1615
7bd28bba 1616 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1617 optimization. So we must be sure to not reference them. */
1618 for (i = 0; i < order_pos; i++)
09fc9532 1619 if (order[i]->process)
b0cdf642 1620 order[new_order_pos++] = order[i];
1621
1622 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1623 {
1624 node = order[i];
09fc9532 1625 if (node->process)
d7c6d889 1626 {
cc636d56 1627 gcc_assert (node->reachable);
09fc9532 1628 node->process = 0;
d7c6d889 1629 cgraph_expand_function (node);
1630 }
1631 }
523c1122 1632 cgraph_process_new_functions ();
773c5ba7 1633
d7c6d889 1634 free (order);
773c5ba7 1635
d7c6d889 1636}
1637
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,		/* Slot is unused.  */
  ORDER_FUNCTION,		/* Slot holds a function (u.f).  */
  ORDER_VAR,			/* Slot holds a variable (u.v).  */
  ORDER_ASM			/* Slot holds a toplevel asm (u.a).  */
};
1647
/* One entity (function, variable or asm statement) tagged with which
   union member is valid; used by cgraph_output_in_order.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;	/* Selects the valid member of U.  */
  union
  {
    struct cgraph_node *f;		/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;		/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;		/* Valid when kind == ORDER_ASM.  */
  } u;
};
1658
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Each entity's order number indexes directly into NODES.  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.
     NOTE(review): this runs in the -fno-toplevel-reorder path; confirm
     the comment's wording against varpool_empty_needed_queue's intent.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1745
b0cdf642 1746/* Return true when function body of DECL still needs to be kept around
1747 for later re-use. */
1748bool
1749cgraph_preserve_function_body_p (tree decl)
1750{
1751 struct cgraph_node *node;
8d8c4c8d 1752
1753 gcc_assert (cgraph_global_info_ready);
b0cdf642 1754 /* Look if there is any clone around. */
ccf4ab6b 1755 node = cgraph_node (decl);
1756 if (node->clones)
1757 return true;
b0cdf642 1758 return false;
1759}
1760
/* Run the interprocedural (IPA) pass pipeline, including the LTO
   summary/streaming passes when LTO output is requested.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the whole program, not a single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      /* NOTE(review): this early return skips bitmap_obstack_release and
	 the PLUGIN_ALL_IPA_PASSES_END callback — confirm whether that is
	 intentional on the error path.  */
      if (seen_error ())
	return;
    }

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In an LTRANS unit the regular IPA passes already ran at WPA time.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1815
34e5cced 1816
ae01b312 1817/* Perform simple optimizations based on callgraph. */
1818
7bfefa9d 1819void
d9d9733a 1820cgraph_optimize (void)
ae01b312 1821{
852f689e 1822 if (seen_error ())
cb2b5570 1823 return;
1824
b0cdf642 1825#ifdef ENABLE_CHECKING
1826 verify_cgraph ();
1827#endif
a861fe52 1828
c1dcd13c 1829 /* Frontend may output common variables after the unit has been finalized.
1830 It is safe to deal with them here as they are always zero initialized. */
1d416bd7 1831 varpool_analyze_pending_decls ();
e9f08e82 1832
f79b6507 1833 timevar_push (TV_CGRAPHOPT);
51949610 1834 if (pre_ipa_mem_report)
1835 {
1836 fprintf (stderr, "Memory consumption before IPA\n");
1837 dump_memory_report (false);
1838 }
d7c6d889 1839 if (!quiet_flag)
cd6bca02 1840 fprintf (stderr, "Performing interprocedural optimizations\n");
523c1122 1841 cgraph_state = CGRAPH_STATE_IPA;
c04e3894 1842
be4d0974 1843 /* Don't run the IPA passes if there was any error or sorry messages. */
852f689e 1844 if (!seen_error ())
be4d0974 1845 ipa_passes ();
1846
34e5cced 1847 /* Do nothing else if any IPA pass found errors. */
852f689e 1848 if (seen_error ())
021c1c18 1849 {
1850 timevar_pop (TV_CGRAPHOPT);
1851 return;
1852 }
34e5cced 1853
e1be32b8 1854 /* This pass remove bodies of extern inline functions we never inlined.
1855 Do this later so other IPA passes see what is really going on. */
1856 cgraph_remove_unreachable_nodes (false, dump_file);
80a85d8a 1857 cgraph_global_info_ready = true;
f79b6507 1858 if (cgraph_dump_file)
1859 {
e4200070 1860 fprintf (cgraph_dump_file, "Optimized ");
f79b6507 1861 dump_cgraph (cgraph_dump_file);
c1dcd13c 1862 dump_varpool (cgraph_dump_file);
f79b6507 1863 }
51949610 1864 if (post_ipa_mem_report)
1865 {
defa2fa6 1866 fprintf (stderr, "Memory consumption after IPA\n");
51949610 1867 dump_memory_report (false);
1868 }
f79b6507 1869 timevar_pop (TV_CGRAPHOPT);
ae01b312 1870
d7c6d889 1871 /* Output everything. */
47306a5d 1872 (*debug_hooks->assembly_start) ();
e4200070 1873 if (!quiet_flag)
1874 fprintf (stderr, "Assembling functions:\n");
b0cdf642 1875#ifdef ENABLE_CHECKING
1876 verify_cgraph ();
1877#endif
56af936e 1878
ccf4ab6b 1879 cgraph_materialize_all_clones ();
acc70efa 1880 cgraph_mark_functions_to_output ();
c1dcd13c 1881
523c1122 1882 cgraph_state = CGRAPH_STATE_EXPANSION;
56af936e 1883 if (!flag_toplevel_reorder)
1884 cgraph_output_in_order ();
1885 else
1886 {
1887 cgraph_output_pending_asms ();
1888
1889 cgraph_expand_all_functions ();
1d416bd7 1890 varpool_remove_unreferenced_decls ();
56af936e 1891
1d416bd7 1892 varpool_assemble_pending_decls ();
56af936e 1893 }
523c1122 1894 cgraph_process_new_functions ();
1895 cgraph_state = CGRAPH_STATE_FINISHED;
c1dcd13c 1896
f79b6507 1897 if (cgraph_dump_file)
1898 {
e4200070 1899 fprintf (cgraph_dump_file, "\nFinal ");
f79b6507 1900 dump_cgraph (cgraph_dump_file);
7410370b 1901 dump_varpool (cgraph_dump_file);
f79b6507 1902 }
b0cdf642 1903#ifdef ENABLE_CHECKING
1904 verify_cgraph ();
4ee9c684 1905 /* Double check that all inline clones are gone and that all
1906 function bodies have been released from memory. */
852f689e 1907 if (!seen_error ())
4ee9c684 1908 {
1909 struct cgraph_node *node;
1910 bool error_found = false;
1911
1912 for (node = cgraph_nodes; node; node = node->next)
1913 if (node->analyzed
1914 && (node->global.inlined_to
1a1a827a 1915 || gimple_has_body_p (node->decl)))
4ee9c684 1916 {
1917 error_found = true;
1918 dump_cgraph_node (stderr, node);
a0c938f0 1919 }
4ee9c684 1920 if (error_found)
c04e3894 1921 internal_error ("nodes with unreleased memory found");
4ee9c684 1922 }
b0cdf642 1923#endif
ae01b312 1924}
34e5cced 1925
121f3051 1926void
1927init_cgraph (void)
1928{
01ec0a6c 1929 if (!cgraph_dump_file)
1930 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 1931}
b5d36404 1932
a0c938f0 1933/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1934 fixed by cgraph_function_versioning (), now the call_expr in their
1935 respective tree code should be updated to call the NEW_VERSION. */
1936
1937static void
1938update_call_expr (struct cgraph_node *new_version)
1939{
1940 struct cgraph_edge *e;
1941
1942 gcc_assert (new_version);
75a70cf9 1943
1944 /* Update the call expr on the edges to call the new version. */
b5d36404 1945 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1946 {
1947 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1948 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1949 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1950 }
b5d36404 1951}
1952
1953
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   were copied, to prevent duplication of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
 {
   struct cgraph_node *new_version;
   struct cgraph_edge *e;
   unsigned i;

   gcc_assert (old_version);

   /* Get (or create) the cgraph node keyed on the new declaration.  */
   new_version = cgraph_node (new_decl);

   /* Inherit the old version's flags, but the clone is local to this
      translation unit and never a vtable method.  */
   new_version->analyzed = true;
   new_version->local = old_version->local;
   new_version->local.externally_visible = false;
   new_version->local.local = true;
   new_version->local.vtable_method = false;
   new_version->global = old_version->global;
   new_version->rtl = old_version->rtl;
   new_version->reachable = true;
   new_version->count = old_version->count;

   /* Clone direct callee edges, skipping calls whose basic block is
      not copied into the clone (they would be dead there).  */
   for (e = old_version->callees; e; e=e->next_callee)
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
       cgraph_clone_edge (e, new_version, e->call_stmt,
			  e->lto_stmt_uid, REG_BR_PROB_BASE,
			  CGRAPH_FREQ_BASE,
			  e->loop_nest, true);
   /* Likewise for indirect call edges.  */
   for (e = old_version->indirect_calls; e; e=e->next_callee)
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
       cgraph_clone_edge (e, new_version, e->call_stmt,
			  e->lto_stmt_uid, REG_BR_PROB_BASE,
			  CGRAPH_FREQ_BASE,
			  e->loop_nest, true);
   FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
     {
       /* Redirect calls to the old version node to point to its new
	  version.  */
       cgraph_redirect_edge_callee (e, new_version);
     }

   return new_version;
 }
2012
 /* Perform function versioning.
    Function versioning includes copying of the tree and
    a callgraph update (creating a new cgraph node and updating
    its callees and callers).

    REDIRECT_CALLERS varray includes the edges to be redirected
    to the new version.

    TREE_MAP is a mapping of tree nodes we want to replace with
    new ones (according to results of prior analysis).
    OLD_VERSION_NODE is the node that is versioned.
    It returns the new version's cgraph node.
    If non-NULL ARGS_TO_SKIP determine function parameters to remove
    from new version.
    If non-NULL BBS_TO_COPY determine what basic blocks to copy.
    If non-NULL NEW_ENTRY_BLOCK determine new entry BB of the clone.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  /* Some functions (e.g. with attributes preventing cloning) cannot
     be versioned at all.  */
  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Removing parameters is only valid when the signature may change.  */
  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the
     new version. */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version. */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node.
     and update the edges of the new node. */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version. */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this. */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node. */
  update_call_expr (new_version_node);

  /* Let IPA passes know a new function appeared.  */
  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
469679ab 2087
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  The first existing clone of NODE is
   turned into the holder of the saved body: it receives a fresh copy of
   NODE's decl, becomes the clone_of parent of NODE's remaining clones, and
   gets a private copy of the function body.  Returns that clone.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give FIRST_CLONE its own declaration and register it in the
     decl->node hashtable under that new decl.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Reparent all remaining sibling clones of NODE under FIRST_CLONE:
	 first point their clone_of at FIRST_CLONE (the loop leaves N at
	 the last sibling, which is handled by the extra assignment), then
	 splice the sibling list onto FIRST_CLONE's clone list.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  if (first_clone->clones)
    /* Walk the whole clone tree below FIRST_CLONE (pre-order, using the
       clones / next_sibling_clone / clone_of links) and retarget every
       node's decl from NODE's decl to FIRST_CLONE's new decl.  */
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    /* Climb back up until a sibling subtree remains.  */
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version. */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
			    NULL, NULL);

  /* The saved body is a private, non-exported entity of this unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
a861fe52 2155
/* Given virtual clone NODE, turn it into an actual clone: copy the body
   from its clone_of origin (applying the recorded tree_map replacements
   and args_to_skip removals) and unlink NODE from the clone tree.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Remember what decl this node was cloned from, preferring the
     original's own former origin when it too was a clone.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version. */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  Unlink NODE from its parent's
     doubly-linked sibling-clone list.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      /* The origin is dead once no clone references it; release its
	 body, callee edges and IPA references.  */
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2192
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Returns
   the (possibly replaced) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  bool gsi_computed = false;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or when the statement already
     calls the callee's decl.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function. */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  if (decl)
    {
      /* The currently-called decl must not itself require argument
	 skipping; only the new callee may.  */
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  /* A nonzero thunk delta means THIS must be adjusted before the call,
     unless the this-parameter (argument 0) is being dropped anyway.  */
  if (e->indirect_info &&
      e->indirect_info->thunk_delta != 0
      && (!e->callee->clone.combined_args_to_skip
	  || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
    {
      if (cgraph_dump_file)
	fprintf (cgraph_dump_file, "          Thunk delta is "
		 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
      gsi = gsi_for_stmt (e->call_stmt);
      gsi_computed = true;
      gimple_adjust_this_by_delta (&gsi,
				   build_int_cst (sizetype,
						  e->indirect_info->thunk_delta));
      /* The adjustment is now materialized in the IL; clear it.  */
      e->indirect_info->thunk_delta = 0;
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      /* Build a replacement call with the skipped arguments removed.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      if (!gsi_computed)
	gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* No argument changes needed; retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  /* Propagate the statement replacement to all clones of the caller.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2294
ccf4ab6b 2295/* Once all functions from compilation unit are in memory, produce all clones
c596d830 2296 and update all calls. We might also do this on demand if we don't want to
2297 bring all functions to memory prior compilation, but current WHOPR
2298 implementation does that and it is is bit easier to keep everything right in
2299 this order. */
ccf4ab6b 2300void
2301cgraph_materialize_all_clones (void)
2302{
2303 struct cgraph_node *node;
2304 bool stabilized = false;
2305
2306 if (cgraph_dump_file)
2307 fprintf (cgraph_dump_file, "Materializing clones\n");
2308#ifdef ENABLE_CHECKING
2309 verify_cgraph ();
2310#endif
2311
2312 /* We can also do topological order, but number of iterations should be
2313 bounded by number of IPA passes since single IPA pass is probably not
2314 going to create clones of clones it created itself. */
2315 while (!stabilized)
2316 {
2317 stabilized = true;
2318 for (node = cgraph_nodes; node; node = node->next)
2319 {
2320 if (node->clone_of && node->decl != node->clone_of->decl
2321 && !gimple_has_body_p (node->decl))
2322 {
2323 if (gimple_has_body_p (node->clone_of->decl))
2324 {
2325 if (cgraph_dump_file)
e20422ea 2326 {
2327 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2328 cgraph_node_name (node->clone_of),
2329 cgraph_node_name (node));
2330 if (node->clone.tree_map)
2331 {
2332 unsigned int i;
2333 fprintf (cgraph_dump_file, " replace map: ");
2334 for (i = 0; i < VEC_length (ipa_replace_map_p,
2335 node->clone.tree_map);
2336 i++)
2337 {
2338 struct ipa_replace_map *replace_info;
2339 replace_info = VEC_index (ipa_replace_map_p,
2340 node->clone.tree_map,
2341 i);
2342 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2343 fprintf (cgraph_dump_file, " -> ");
2344 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2345 fprintf (cgraph_dump_file, "%s%s;",
2346 replace_info->replace_p ? "(replace)":"",
2347 replace_info->ref_p ? "(ref)":"");
2348 }
2349 fprintf (cgraph_dump_file, "\n");
2350 }
2351 if (node->clone.args_to_skip)
2352 {
2353 fprintf (cgraph_dump_file, " args_to_skip: ");
2354 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2355 }
2356 if (node->clone.args_to_skip)
2357 {
2358 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2359 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2360 }
2361 }
ccf4ab6b 2362 cgraph_materialize_clone (node);
a510bd8d 2363 stabilized = false;
ccf4ab6b 2364 }
ccf4ab6b 2365 }
2366 }
2367 }
ee3f5fc0 2368 for (node = cgraph_nodes; node; node = node->next)
2369 if (!node->analyzed && node->callees)
2370 cgraph_node_remove_callees (node);
c596d830 2371 if (cgraph_dump_file)
2372 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2373#ifdef ENABLE_CHECKING
2374 verify_cgraph ();
2375#endif
ccf4ab6b 2376 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2377}
2378
a861fe52 2379#include "gt-cgraphunit.h"