/* gcc/cgraphunit.c — callgraph-based compilation driver.  */
/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
ae01b312 21
/* This module implements the main driver of the compilation process as well
   as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface in between
   tree based frontends and the backend (and middle end).

   The front-end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front-end has parsed the whole body of
      a function and it is certain that neither the function body nor the
      declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for static
      variables.

    - cgraph_finalize_compilation_unit

      This function is called once a (source level) compilation unit is
      finalized and it will no longer change.

      Here the call-graph construction and local function analysis take
      place.  Bodies of unreachable functions are released to conserve
      memory usage.

      The function can be called multiple times when multiple source level
      compilation units are combined (such as in the C frontend).

    - cgraph_optimize

      In this unit-at-a-time compilation the intra procedural analysis takes
      place here.  In particular the static functions whose address is never
      taken are marked as local.  The backend can then use this information to
      modify calling conventions, do better inlining or similar optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced by some hidden way the
      call-graph data structure must be updated accordingly by this function.
      There should be little need to call this function and all the references
      should be made explicit to cgraph code.  At present these functions are
      used by the C++ frontend to explicitly mark the keyed methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood by
      generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by them as needed.

      ??? On the tree-ssa genericizing should take place here and we will avoid
      need for these hooks (replacing them by a genericizing hook).

   Analyzing of all functions is deferred
   to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are
   analyzed.  During analysis the call-graph edges from reachable
   functions are constructed and their destinations are marked as
   reachable.  References to functions and variables are discovered too
   and variables found to be needed are output to the assembly file.  Via
   the mark_referenced call in assemble_variable, functions referenced by
   static variables are noticed too.

   The intra-procedural information is produced and its existence
   indicated by global_info_ready.  Once this flag is set it is impossible
   to change a function from !reachable to reachable and thus
   assemble_variable no longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable functions
   that have not been completely inlined or are not external are output.

   ??? It is possible that a reference to a function or variable is optimized
   out.  We cannot deal with this nicely because topological order is not
   suitable for it.  For tree-ssa we may consider another pass doing
   optimization and re-discovering reachable functions.

   ??? Reorganize code so variables are output very last and only if they
   really have been referenced by produced code, so we catch more cases
   where the reference has been optimized out.  */
121f3051 107
acc70efa 108
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"
#include "plugin.h"

/* Forward declarations for the local driver helpers defined below.  */
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
static void cgraph_analyze_function (struct cgraph_node *);

/* Stream used for cgraph dumps; NULL when dumping is disabled.  */
FILE *cgraph_dump_file;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;

/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, or something magic in the system
   configury.  Returns true when the function must be output.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  always_inline functions are exempt since
     keeping them would defeat the attribute's purpose.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
     return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize
	    && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !(DECL_CONTEXT (decl)
		 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}
206
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Returns true if at least one function was processed at construction
   time (and thus new output may be needed).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      /* Which catch-up work is needed depends on how far compilation
	 has progressed when the new function appeared.  */
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      cgraph_call_function_insertion_hooks (node);
      varpool_analyze_pending_decls ();
    }
  return output;
}
278
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with the new body so in unit at a time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  /* Call edges out of the old body are no longer valid.  */
  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already proceeded it and ignored as not needed or got
     a re-declaration in IMA mode.  Clear reachable only when the
     node is not actually sitting on the queue.  */
  if (node->reachable)
    {
      struct cgraph_node *n;

      for (n = cgraph_nodes_queue; n; n = n->next_needed)
	if (n == node)
	  break;
      if (!n)
	node->reachable = 0;
    }
}
c08871a9 323
1e8e9920 324static void
325cgraph_lower_function (struct cgraph_node *node)
326{
327 if (node->lowered)
328 return;
bfec3452 329
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
333
1e8e9920 334 tree_lowering_passes (node->decl);
335 node->lowered = true;
336}
337
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A redefinition: throw away the analysis of the previous body.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* If the front end already built a CFG, the body is considered lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by vtable from
	 other compilation unit.  Still we want to devirtualize calls
	 to those so we need to analyze them.
	 FIXME: We should introduce may edges for this purpose and update
	 their handling in unreachable function removal and inliner too.  */
      || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}
385
0da03d11 386/* C99 extern inline keywords allow changing of declaration after function
387 has been finalized. We need to re-decide if we want to mark the function as
388 needed then. */
389
390void
391cgraph_mark_if_needed (tree decl)
392{
393 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 395 cgraph_mark_needed_node (node);
396}
397
ccf4ab6b 398/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
399static bool
400clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
401{
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
405}
406
1a036a3b 407/* Verify edge E count and frequency. */
408
409static bool
410verify_edge_count_and_frequency (struct cgraph_edge *e)
411{
412 bool error_found = false;
413 if (e->count < 0)
414 {
415 error ("caller edge count is negative");
416 error_found = true;
417 }
418 if (e->frequency < 0)
419 {
420 error ("caller edge frequency is negative");
421 error_found = true;
422 }
423 if (e->frequency > CGRAPH_FREQ_MAX)
424 {
425 error ("caller edge frequency is too large");
426 error_found = true;
427 }
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
434 error ("caller edge frequency %i does not match BB freqency %i",
435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441}
442
/* Verify the internal consistency of NODE: edge aux fields, counts and
   frequencies, inline-clone invariants, clone lists, comdat groups, and
   (when a gimple body is available) that the call edges match the call
   statements actually present in the CFG.  Aborts via internal_error on
   the first verification run that finds any problem.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* Do not verify a graph that is already known to be broken.  */
  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  /* The aux field of every edge must start out clear; the CFG walk below
     uses it as a "seen" marker.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       identifier_to_locale (cgraph_node_name (e->caller)),
	       identifier_to_locale (cgraph_node_name (e->callee)));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("execution count is negative");
      error_found = true;
    }
  /* Inline clones are private copies of a function inlined into one
     caller; they may not be externally visible, addressable or needed.  */
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
	{
	  error ("aux field set for indirect edge from %s",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  error_found = true;
	}
      if (!e->indirect_unknown_callee
	  || !e->indirect_info)
	{
	  error ("An indirect edge from %s is not marked as indirect or has "
		 "associated indirect_info, the corresponding statement is: ",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  debug_gimple_stmt (e->call_stmt);
	  error_found = true;
	}
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
	error_found = true;
      if (!e->inline_failed)
	{
	  /* An inlined edge: this node's inlined_to must point at the
	     outermost function of the inline chain, and it can have only
	     a single caller.  */
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* Check the doubly linked sibling list of clones is consistent with
     the clone_of back-pointers.  */
  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
	if (n == node)
	  break;
      if (!n)
	{
	  error ("node has wrong clone_of");
	  error_found = true;
	}
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
	if (n->clone_of != node)
	  break;
      if (n)
	{
	  error ("node has wrong clone list");
	  error_found = true;
	}
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  /* same_comdat_group must form a circular list of at least two
     DECL_ONE_ONLY nodes.  */
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
	{
	  error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
	  error_found = true;
	}
      if (n == node)
	{
	  error ("node is alone in a comdat group");
	  error_found = true;
	}
      do
	{
	  if (!n->same_comdat_group)
	    {
	      error ("same_comdat_group is not a circular list");
	      error_found = true;
	      break;
	    }
	  n = n->same_comdat_group;
	}
      while (n != node);
    }

  /* Finally cross-check the recorded call edges against the actual call
     statements in the body, when a body is present.  */
  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
		 !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		if (is_gimple_call (stmt))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    tree decl = gimple_call_fndecl (stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_gimple_stmt (stmt);
			    error_found = true;
			  }
			if (!e->indirect_unknown_callee)
			  {
			    if (e->callee->same_body_alias)
			      {
				error ("edge points to same body alias:");
				debug_tree (e->callee->decl);
				error_found = true;
			      }
			    else if (!e->callee->global.inlined_to
				     && decl
				     && cgraph_get_node (decl)
				     && (e->callee->former_clone_of
					 != cgraph_get_node (decl)->decl)
				     && !clone_of_p (cgraph_node (decl),
						     e->callee))
			      {
				error ("edge points to wrong declaration:");
				debug_tree (e->callee->decl);
				fprintf (stderr," Instead of:");
				debug_tree (decl);
				error_found = true;
			      }
			  }
			else if (decl)
			  {
			    error ("an indirect edge with unknown callee "
				   "corresponding to a call_stmt with "
				   "a known declaration:");
			    error_found = true;
			    debug_gimple_stmt (e->call_stmt);
			  }
			/* Mark the edge as matched to a statement.  */
			e->aux = (void *)1;
		      }
		    else if (decl)
		      {
			error ("missing callgraph edge for call stmt:");
			debug_gimple_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?! */
	gcc_unreachable ();

      /* Every edge must have been matched to some statement above;
	 clear the aux markers again as we go.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)),
		     identifier_to_locale (cgraph_node_name (e->callee)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
      for (e = node->indirect_calls; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("an indirect edge from %s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}
727
/* Verify whole cgraph structure by running verify_cgraph_node on every
   node.  Does nothing once an error has already been reported.  */
DEBUG_FUNCTION void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (seen_error ())
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}
740
56af936e 741/* Output all asm statements we have stored up to be output. */
742
743static void
744cgraph_output_pending_asms (void)
745{
746 struct cgraph_asm_node *can;
747
852f689e 748 if (seen_error ())
56af936e 749 return;
750
751 for (can = cgraph_asm_nodes; can; can = can->next)
752 assemble_asm (can->asm_str);
753 cgraph_asm_nodes = NULL;
754}
755
/* Analyze the function scheduled to be output: assign its assembler
   name, gimplify and lower its body, and mark NODE as analyzed.
   Saves and restores current_function_decl/cfun around the work.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* NOTE: "neeeded" is the actual (misspelled) name of this helper in
     this version of GCC; do not "correct" it here.  */
  assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}
782
05806473 783/* Look for externally_visible and used attributes and mark cgraph nodes
784 accordingly.
785
786 We cannot mark the nodes at the point the attributes are processed (in
787 handle_*_attribute) because the copy of the declarations available at that
788 point may not be canonical. For example, in:
789
790 void f();
791 void f() __attribute__((used));
792
793 the declaration we see in handle_used_attribute will be the second
794 declaration -- but the front end will subsequently merge that declaration
795 with the original declaration and discard the second declaration.
796
797 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
798
799 void f() {}
800 void f() __attribute__((externally_visible));
801
802 is valid.
803
804 So, we walk the nodes at the end of the translation unit, applying the
805 attributes at that point. */
806
807static void
808process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 809 struct varpool_node *first_var)
05806473 810{
811 struct cgraph_node *node;
1d416bd7 812 struct varpool_node *vnode;
05806473 813
814 for (node = cgraph_nodes; node != first; node = node->next)
815 {
816 tree decl = node->decl;
83a23b05 817 if (DECL_PRESERVE_P (decl))
0b49f8f8 818 cgraph_mark_needed_node (node);
62433d51 819 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
820 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
821 && TREE_PUBLIC (node->decl))
822 {
823 if (node->local.finalized)
824 cgraph_mark_needed_node (node);
825 }
826 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 827 {
ba12ea31 828 if (! TREE_PUBLIC (node->decl))
712d2297 829 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
830 "%<externally_visible%>"
831 " attribute have effect only on public objects");
59dd4830 832 else if (node->local.finalized)
833 cgraph_mark_needed_node (node);
05806473 834 }
835 }
1d416bd7 836 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 837 {
838 tree decl = vnode->decl;
83a23b05 839 if (DECL_PRESERVE_P (decl))
05806473 840 {
22671757 841 vnode->force_output = true;
05806473 842 if (vnode->finalized)
1d416bd7 843 varpool_mark_needed_node (vnode);
05806473 844 }
62433d51 845 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
846 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
0d9d5d69 847 && TREE_PUBLIC (vnode->decl))
62433d51 848 {
849 if (vnode->finalized)
850 varpool_mark_needed_node (vnode);
851 }
852 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
05806473 853 {
ba12ea31 854 if (! TREE_PUBLIC (vnode->decl))
712d2297 855 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
856 "%<externally_visible%>"
857 " attribute have effect only on public objects");
59dd4830 858 else if (vnode->finalized)
859 varpool_mark_needed_node (vnode);
05806473 860 }
861 }
862}
863
aeeb194b 864/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
865 each reachable functions) and build cgraph.
866 The function can be called multiple times after inserting new nodes
0d424440 867 into beginning of queue. Just the new part of queue is re-scanned then. */
ae01b312 868
static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  Being static, these survive across calls so
     the second invocation only looks at nodes added since the first.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  /* Handle attributes on everything finalized so far, then remember the
     current list heads so later rounds skip what was just processed.  */
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the work queue.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Everything this node calls becomes reachable.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* Members of a comdat group are kept together: if one is reachable,
	 all of them are.  */
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body.  The DECL_INITIAL () of
	 that abstract function declaration will be later needed to output
	 debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove the bodies (and nodes) of everything the fixpoint above did not
     prove reachable.  Only nodes added since the previous call need to be
     considered.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
996
8f69fd82 997
/* Analyze the whole compilation unit once it is parsed completely.
   Called by the front end; lowers everything to GIMPLE, computes
   reachability and then hands the unit to cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  Analysis runs a second time because the
     first round may have created new entry points.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
9ed5b1f5 1035
1036
/* Figure out what functions we want to assemble.  Sets the PROCESS flag on
   every node that must be emitted; comdat group members are pulled in as a
   unit.  Also verifies (when checking is enabled) that all unneeded bodies
   were reclaimed by earlier analysis.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for processing before we start.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* A node already marked here can only have been pulled in via its
	 comdat group below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* Look for at least one caller where inlining did not happen;
	 E is non-NULL afterwards iff such a call exists.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* The whole comdat group must be emitted together.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Defer the "reclaimed?" check: a later group member may still
	     mark this node.  Re-verified after the main loop.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Second pass over comdat members that were skipped above: by now every
     group that is needed has been fully marked, so an unmarked member with
     a body is an error.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
1133
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form (already in SSA, with an empty CFG).

   Set current_function_decl and cfun to newly constructed empty function body.
   Return basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  /* Mark the function as already being in SSA form so passes do not try
     to rewrite it into SSA again.  */
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties normally produced by gimplification, lowering,
     CFG construction and the SSA passes.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1166
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  Statements implementing the
   adjustment are inserted after iterator *BSI; the adjusted pointer is
   returned in a fresh temporary.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a `this' adjusting thunk the constant offset is applied first,
     before any vtable lookup.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily create the type of a vtable entry (pointer to a function
	 returning int with unspecified arguments).  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  /* For a result adjusting thunk the constant offset is applied last,
     after the vtable adjustment.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1293
/* Produce assembler for thunk NODE.  Uses the target's output_mi_thunk hook
   when it can handle this thunk directly; otherwise builds an explicit
   GIMPLE body that adjusts the arguments/result and tail-calls the thunk
   target.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Fast path: let the target emit the thunk as raw assembly when it says
     it can.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: synthesize a GIMPLE body for the thunk and queue it for
	 normal compilation.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate results go straight into the RESULT_DECL.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
            restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument is `this'; adjust it if this is a this-adjusting
	 thunk, pass it through unchanged otherwise.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					build_zero_cst (TREE_TYPE (restmp)),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  /* Adjust the non-NULL result.  */
	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      /* On the NULL path the result stays zero.  */
	      gimple stmt;
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1470
/* Expand function specified by NODE: emit its aliases and thunks, generate
   RTL for the body and release the GIMPLE body afterwards.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Find the tail of the same-body list first ...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* Walk aliases in the order they were created; it is possible that
	 thunks refers to the aliases made earlier.  */
      for (; alias; alias = next)
	{
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
      cgraph_process_new_functions ();
    }

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1521
b0cdf642 1522/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1523
1524bool
326a9581 1525cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1526{
b0cdf642 1527 *reason = e->inline_failed;
1528 return !e->inline_failed;
d7c6d889 1529}
b0cdf642 1530
acc70efa 1531
acc70efa 1532
d9d9733a 1533/* Expand all functions that must be output.
1534
d7c6d889 1535 Attempt to topologically sort the nodes so function is output when
1536 all called functions are already assembled to allow data to be
91c82c20 1537 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1538 between a function and its callees (later we may choose to use a more
d7c6d889 1539 sophisticated algorithm for function reordering; we will likely want
1540 to use subsections to make the output functions appear in top-down
1541 order). */
1542
1543static void
a6868229 1544cgraph_expand_all_functions (void)
d7c6d889 1545{
1546 struct cgraph_node *node;
4c36ffe6 1547 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1548 int order_pos, new_order_pos = 0;
d7c6d889 1549 int i;
1550
d7c6d889 1551 order_pos = cgraph_postorder (order);
cc636d56 1552 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1553
7bd28bba 1554 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1555 optimization. So we must be sure to not reference them. */
1556 for (i = 0; i < order_pos; i++)
09fc9532 1557 if (order[i]->process)
b0cdf642 1558 order[new_order_pos++] = order[i];
1559
1560 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1561 {
1562 node = order[i];
09fc9532 1563 if (node->process)
d7c6d889 1564 {
cc636d56 1565 gcc_assert (node->reachable);
09fc9532 1566 node->process = 0;
d7c6d889 1567 cgraph_expand_function (node);
1568 }
1569 }
523c1122 1570 cgraph_process_new_functions ();
773c5ba7 1571
d7c6d889 1572 free (order);
773c5ba7 1573
d7c6d889 1574}
1575
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,		/* Slot not occupied by any entity.  */
  ORDER_FUNCTION,		/* A function (struct cgraph_node).  */
  ORDER_VAR,			/* A variable (struct varpool_node).  */
  ORDER_ASM			/* A top-level asm (struct cgraph_asm_node).  */
};
1585
/* One entity (function, variable or top-level asm) to be emitted in
   source order; KIND selects which member of U is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when KIND == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when KIND == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when KIND == ORDER_ASM.  */
  } u;
};
1596
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* NODES is indexed by the order number assigned at creation time, so one
     slot per entity ever created; unused slots stay ORDER_UNDEFINED.  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Slot in every function marked for output ...  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* ... every pending variable ...  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* ... and every top-level asm statement.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In -fno-toplevel-reorder mode we output all statics; mark them as
     needed so they get assembled below.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1683
b0cdf642 1684/* Return true when function body of DECL still needs to be kept around
1685 for later re-use. */
1686bool
1687cgraph_preserve_function_body_p (tree decl)
1688{
1689 struct cgraph_node *node;
8d8c4c8d 1690
1691 gcc_assert (cgraph_global_info_ready);
b0cdf642 1692 /* Look if there is any clone around. */
ccf4ab6b 1693 node = cgraph_node (decl);
1694 if (node->clones)
1695 return true;
b0cdf642 1696 return false;
1697}
1698
/* Run the inter-procedural (IPA) pass lists, including the LTO summary
   generation passes.  Behavior differs between a normal compile, an LTO
   write-out compile (flag_generate_lto) and an ltrans unit (in_lto_p /
   flag_ltrans).  */
static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      /* NOTE(review): this early return skips bitmap_obstack_release and
	 the PLUGIN_ALL_IPA_PASSES_END callback below -- verify this is
	 intentional (the caller bails out on seen_error () anyway).  */
      if (seen_error ())
	return;
    }

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In an ltrans unit the regular IPA passes already ran at compile time.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1753
34e5cced 1754
/* Perform simple optimizations based on callgraph.  Top-level driver:
   runs the IPA passes, then expands and assembles all output functions.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder emits everything in source order; otherwise
     expand in (roughly) topological order.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
34e5cced 1863
121f3051 1864void
1865init_cgraph (void)
1866{
01ec0a6c 1867 if (!cgraph_dump_file)
1868 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 1869}
b5d36404 1870
a0c938f0 1871/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1872 fixed by cgraph_function_versioning (), now the call_expr in their
1873 respective tree code should be updated to call the NEW_VERSION. */
1874
1875static void
1876update_call_expr (struct cgraph_node *new_version)
1877{
1878 struct cgraph_edge *e;
1879
1880 gcc_assert (new_version);
75a70cf9 1881
1882 /* Update the call expr on the edges to call the new version. */
b5d36404 1883 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1884 {
1885 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1886 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1887 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1888 }
b5d36404 1889}
1890
1891
1892/* Create a new cgraph node which is the new version of
1893 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1894 edges which should be redirected to point to
1895 NEW_VERSION. ALL the callees edges of OLD_VERSION
1896 are cloned to the new version node. Return the new
b06ab5fa 1897 version node.
1898
1899 If non-NULL BLOCK_TO_COPY determine what basic blocks
1900 was copied to prevent duplications of calls that are dead
1901 in the clone. */
b5d36404 1902
1903static struct cgraph_node *
1904cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
4460a647 1905 tree new_decl,
b06ab5fa 1906 VEC(cgraph_edge_p,heap) *redirect_callers,
1907 bitmap bbs_to_copy)
1908 {
b5d36404 1909 struct cgraph_node *new_version;
32936803 1910 struct cgraph_edge *e;
b5d36404 1911 unsigned i;
1912
1913 gcc_assert (old_version);
a0c938f0 1914
b5d36404 1915 new_version = cgraph_node (new_decl);
1916
1917 new_version->analyzed = true;
1918 new_version->local = old_version->local;
a70a5e2c 1919 new_version->local.externally_visible = false;
1920 new_version->local.local = true;
1921 new_version->local.vtable_method = false;
b5d36404 1922 new_version->global = old_version->global;
a93f1c3b 1923 new_version->rtl = old_version->rtl;
b5d36404 1924 new_version->reachable = true;
1925 new_version->count = old_version->count;
1926
a70a5e2c 1927 for (e = old_version->callees; e; e=e->next_callee)
b06ab5fa 1928 if (!bbs_to_copy
1929 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1930 cgraph_clone_edge (e, new_version, e->call_stmt,
1931 e->lto_stmt_uid, REG_BR_PROB_BASE,
1932 CGRAPH_FREQ_BASE,
1933 e->loop_nest, true);
a70a5e2c 1934 for (e = old_version->indirect_calls; e; e=e->next_callee)
b06ab5fa 1935 if (!bbs_to_copy
1936 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1937 cgraph_clone_edge (e, new_version, e->call_stmt,
1938 e->lto_stmt_uid, REG_BR_PROB_BASE,
1939 CGRAPH_FREQ_BASE,
1940 e->loop_nest, true);
48148244 1941 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
4460a647 1942 {
1943 /* Redirect calls to the old version node to point to its new
1944 version. */
1945 cgraph_redirect_edge_callee (e, new_version);
1946 }
b5d36404 1947
1948 return new_version;
1949 }
1950
 /* Perform function versioning.
    Function versioning includes copying of the tree and
    a callgraph update (creating a new cgraph node and updating
    its callees and callers).

    REDIRECT_CALLERS varray includes the edges to be redirected
    to the new version.

    TREE_MAP is a mapping of tree nodes we want to replace with
    new ones (according to results of prior analysis).
    OLD_VERSION_NODE is the node that is versioned.
    It returns the new version's cgraph node.
    If non-NULL ARGS_TO_SKIP determine function parameters to remove
    from new version.
    If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
    If non_NULL NEW_ENTRY determine new entry BB of the clone.
    CLONE_NAME is a suffix used when generating the clone's
    assembler name.  Returns NULL when OLD_VERSION_NODE cannot be
    versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  /* Bail out when duplicating the function body is not allowed.  */
  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  /* The clone must not share the original's RTL.  */
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node.
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Let IPA passes know that a new function body appeared.  */
  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
469679ab 2023
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  NODE must be the master node of its
   decl; its first clone takes over a fresh copy of the decl and becomes
   the new parent of all remaining clones.  Returns that first clone.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give FIRST_CLONE its own declaration; it will own the saved body.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent the remaining sibling clones of NODE onto FIRST_CLONE:
	 mark each as a clone of FIRST_CLONE, then splice the whole
	 sibling list onto the front of FIRST_CLONE's clone list.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
	n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
	first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  /* FIRST_CLONE is now an independent root; NODE keeps no clones.  */
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Walk the whole clone tree below FIRST_CLONE (iterative depth-first
     traversal, no recursion) and retarget every decl that still points
     at NODE's decl to FIRST_CLONE's fresh decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    /* Backtrack to the nearest ancestor with an unvisited sibling.  */
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
			    NULL, NULL);

  /* The saved body is a local, non-COMDAT copy of the function.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  /* Pending IPA transforms apply to the offline copy, not the clone.  */
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
a861fe52 2091
/* Given virtual clone, turn it into actual clone.  Copies the body from
   the clone's origin, unlinks NODE from the clone tree and, when the
   origin itself became unused, releases the origin's body.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Record what this clone was derived from, for dumps and debugging;
     prefer the origin's own former_clone_of if it was itself a clone.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
  			    node->clone.tree_map, true,
			    node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  Unlink NODE from its siblings in the
     clone tree.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin is neither analyzed nor needed by other clones any
     more, its body and references can be released now.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2128
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Returns
   the (possibly new) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  bool gsi_computed = false;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or calls already pointing at the
     edge callee (directly or via a same-body alias).  */
  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  /* If the call goes through a thunk adjusting THIS by a constant
     delta, fold that adjustment into the call site -- unless the THIS
     argument (argument 0) is being removed anyway.  */
  if (e->indirect_info && e->indirect_info->thunk_delta
      && integer_nonzerop (e->indirect_info->thunk_delta)
      && (!e->callee->clone.combined_args_to_skip
	  || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
    {
      if (cgraph_dump_file)
	{
	  fprintf (cgraph_dump_file, " Thunk delta is ");
	  print_generic_expr (cgraph_dump_file,
			      e->indirect_info->thunk_delta, 0);
	  fprintf (cgraph_dump_file, "\n");
	}
      gsi = gsi_for_stmt (e->call_stmt);
      gsi_computed = true;
      gimple_adjust_this_by_delta (&gsi, e->indirect_info->thunk_delta);
      e->indirect_info->thunk_delta = NULL_TREE;
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      /* The callee clone dropped some parameters; build a new call
	 statement without the corresponding arguments.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      /* Keep virtual SSA form consistent for the copied statement.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      if (!gsi_computed)
	gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* No arguments change; simply retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  /* Propagate the statement replacement into all clones of the caller
     that share this call site.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, " updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2232
ccf4ab6b 2233/* Once all functions from compilation unit are in memory, produce all clones
c596d830 2234 and update all calls. We might also do this on demand if we don't want to
2235 bring all functions to memory prior compilation, but current WHOPR
2236 implementation does that and it is is bit easier to keep everything right in
2237 this order. */
ccf4ab6b 2238void
2239cgraph_materialize_all_clones (void)
2240{
2241 struct cgraph_node *node;
2242 bool stabilized = false;
2243
2244 if (cgraph_dump_file)
2245 fprintf (cgraph_dump_file, "Materializing clones\n");
2246#ifdef ENABLE_CHECKING
2247 verify_cgraph ();
2248#endif
2249
2250 /* We can also do topological order, but number of iterations should be
2251 bounded by number of IPA passes since single IPA pass is probably not
2252 going to create clones of clones it created itself. */
2253 while (!stabilized)
2254 {
2255 stabilized = true;
2256 for (node = cgraph_nodes; node; node = node->next)
2257 {
2258 if (node->clone_of && node->decl != node->clone_of->decl
2259 && !gimple_has_body_p (node->decl))
2260 {
2261 if (gimple_has_body_p (node->clone_of->decl))
2262 {
2263 if (cgraph_dump_file)
e20422ea 2264 {
2265 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2266 cgraph_node_name (node->clone_of),
2267 cgraph_node_name (node));
2268 if (node->clone.tree_map)
2269 {
2270 unsigned int i;
2271 fprintf (cgraph_dump_file, " replace map: ");
2272 for (i = 0; i < VEC_length (ipa_replace_map_p,
2273 node->clone.tree_map);
2274 i++)
2275 {
2276 struct ipa_replace_map *replace_info;
2277 replace_info = VEC_index (ipa_replace_map_p,
2278 node->clone.tree_map,
2279 i);
2280 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2281 fprintf (cgraph_dump_file, " -> ");
2282 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2283 fprintf (cgraph_dump_file, "%s%s;",
2284 replace_info->replace_p ? "(replace)":"",
2285 replace_info->ref_p ? "(ref)":"");
2286 }
2287 fprintf (cgraph_dump_file, "\n");
2288 }
2289 if (node->clone.args_to_skip)
2290 {
2291 fprintf (cgraph_dump_file, " args_to_skip: ");
2292 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2293 }
2294 if (node->clone.args_to_skip)
2295 {
2296 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2297 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2298 }
2299 }
ccf4ab6b 2300 cgraph_materialize_clone (node);
a510bd8d 2301 stabilized = false;
ccf4ab6b 2302 }
ccf4ab6b 2303 }
2304 }
2305 }
ee3f5fc0 2306 for (node = cgraph_nodes; node; node = node->next)
2307 if (!node->analyzed && node->callees)
2308 cgraph_node_remove_callees (node);
c596d830 2309 if (cgraph_dump_file)
2310 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2311#ifdef ENABLE_CHECKING
2312 verify_cgraph ();
2313#endif
ccf4ab6b 2314 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2315}
2316
a861fe52 2317#include "gt-cgraphunit.h"