]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
2010-10-05 Paolo Carlini <paolo.carlini@oracle.com>
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
cd6bca02 1/* Callgraph based interprocedural optimizations.
028a99ef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
6329636b 48 In the call-graph construction and local function
b0cdf642 49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
6329636b 80 Analyzing of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
6329636b 106 where reference has been optimized out. */
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
bfec3452 146static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 147
ecb08119 148FILE *cgraph_dump_file;
121f3051 149
28454517 150/* Used for vtable lookup in thunk adjusting. */
151static GTY (()) tree vtable_entry_type;
152
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  NODE is the cgraph node for DECL; return true when the
   function must be output.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
     return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.

     Whole-program modes (-fwhole-program, -flto, -fwhopr) suppress this,
     since unreferenced public symbols can then be dropped.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize
	    && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !(DECL_CONTEXT (decl)
		 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto
       && !flag_whopr)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}
207
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Return true if at least one function was finalized at construction
   time (i.e. there is new output to process).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      /* Pop the node off the queue before processing; processing may
	 push further new nodes.  */
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      cgraph_call_function_insertion_hooks (node);
      /* Processing the node may have finalized new static variables.  */
      varpool_analyze_pending_decls ();
    }
  return output;
}
279
9b8fb23a 280/* As an GCC extension we allow redefinition of the function. The
281 semantics when both copies of bodies differ is not well defined.
282 We replace the old body with new body so in unit at a time mode
283 we always use new body, while in normal mode we may end up with
284 old body inlined into some functions and new body expanded and
285 inlined in others.
286
287 ??? It may make more sense to use one body for inlining and other
288 body for expanding the function but this is difficult to do. */
289
290static void
291cgraph_reset_node (struct cgraph_node *node)
292{
09fc9532 293 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 294 This is *not* testing for whether we've already emitted the function.
295 That case can be sort-of legitimately seen with real function redefinition
296 errors. I would argue that the front end should never present us with
297 such a case, but don't enforce that for now. */
09fc9532 298 gcc_assert (!node->process);
9b8fb23a 299
300 /* Reset our data structures so we can analyze the function again. */
301 memset (&node->local, 0, sizeof (node->local));
302 memset (&node->global, 0, sizeof (node->global));
303 memset (&node->rtl, 0, sizeof (node->rtl));
304 node->analyzed = false;
305 node->local.redefined_extern_inline = true;
306 node->local.finalized = false;
307
9b8fb23a 308 cgraph_node_remove_callees (node);
309
310 /* We may need to re-queue the node for assembling in case
46beef9a 311 we already proceeded it and ignored as not needed or got
312 a re-declaration in IMA mode. */
313 if (node->reachable)
9b8fb23a 314 {
315 struct cgraph_node *n;
316
317 for (n = cgraph_nodes_queue; n; n = n->next_needed)
318 if (n == node)
319 break;
320 if (!n)
321 node->reachable = 0;
322 }
323}
c08871a9 324
1e8e9920 325static void
326cgraph_lower_function (struct cgraph_node *node)
327{
328 if (node->lowered)
329 return;
bfec3452 330
331 if (node->nested)
332 lower_nested_functions (node->decl);
333 gcc_assert (!node->nested);
334
1e8e9920 335 tree_lowering_passes (node->decl);
336 node->lowered = true;
337}
338
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A redefinition (extern inline / IMA) throws away previous analysis.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* If the front end already built a CFG, the body is lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by vtable from
	 other compilation unit.  Still we want to devirtualize calls
	 to those so we need to analyze them.
	 FIXME: We should introduce may edges for this purpose and update
	 their handling in unreachable function removal and inliner too.  */
      || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}
386
0da03d11 387/* C99 extern inline keywords allow changing of declaration after function
388 has been finalized. We need to re-decide if we want to mark the function as
389 needed then. */
390
391void
392cgraph_mark_if_needed (tree decl)
393{
394 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 395 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 396 cgraph_mark_needed_node (node);
397}
398
#ifdef ENABLE_CHECKING
/* Return TRUE if NODE2 is equivalent to NODE or its clone.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  struct cgraph_node *ancestor;

  /* Walk the clone_of chain starting at NODE2 looking for NODE.  */
  for (ancestor = node2; ancestor; ancestor = ancestor->clone_of)
    if (ancestor == node)
      return true;
  return false;
}
#endif
ccf4ab6b 409
1a036a3b 410/* Verify edge E count and frequency. */
411
412static bool
413verify_edge_count_and_frequency (struct cgraph_edge *e)
414{
415 bool error_found = false;
416 if (e->count < 0)
417 {
418 error ("caller edge count is negative");
419 error_found = true;
420 }
421 if (e->frequency < 0)
422 {
423 error ("caller edge frequency is negative");
424 error_found = true;
425 }
426 if (e->frequency > CGRAPH_FREQ_MAX)
427 {
428 error ("caller edge frequency is too large");
429 error_found = true;
430 }
431 if (gimple_has_body_p (e->caller->decl)
432 && !e->caller->global.inlined_to
433 && (e->frequency
434 != compute_call_stmt_bb_frequency (e->caller->decl,
435 gimple_bb (e->call_stmt))))
436 {
437 error ("caller edge frequency %i does not match BB freqency %i",
438 e->frequency,
439 compute_call_stmt_bb_frequency (e->caller->decl,
440 gimple_bb (e->call_stmt)));
441 error_found = true;
442 }
443 return error_found;
444}
445
/* Verify cgraph nodes of given cgraph node.  Checks edge aux fields,
   inline-clone invariants, clone lists, comdat groups and, for analyzed
   bodies, the correspondence between call statements and callgraph edges.
   Calls internal_error if any inconsistency is found.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* Don't pile verifier errors on top of already-reported ones.  */
  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  /* Edge aux fields must be clear outside of the stmt-walk below.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       identifier_to_locale (cgraph_node_name (e->caller)),
	       identifier_to_locale (cgraph_node_name (e->callee)));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  /* Inline clones must be local, not addressable and not needed.  */
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("Externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("Inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("Inline clone is needed");
      error_found = true;
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
	{
	  error ("aux field set for indirect edge from %s",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  error_found = true;
	}
      if (!e->indirect_unknown_callee
	  || !e->indirect_info)
	{
	  error ("An indirect edge from %s is not marked as indirect or has "
		 "associated indirect_info, the corresponding statement is: ",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  debug_gimple_stmt (e->call_stmt);
	  error_found = true;
	}
    }
  /* Check caller edges and the inlined_to invariants.  */
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
	error_found = true;
      if (!e->inline_failed)
	{
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* Verify the doubly-linked clone tree around this node.  */
  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
	if (n == node)
	  break;
      if (!n)
	{
	  error ("node has wrong clone_of");
	  error_found = true;
	}
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
	if (n->clone_of != node)
	  break;
      if (n)
	{
	  error ("node has wrong clone list");
	  error_found = true;
	}
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  /* same_comdat_group must be a circular list of DECL_ONE_ONLY nodes.  */
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
	{
	  error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
	  error_found = true;
	}
      if (n == node)
	{
	  error ("node is alone in a comdat group");
	  error_found = true;
	}
      do
	{
	  if (!n->same_comdat_group)
	    {
	      error ("same_comdat_group is not a circular list");
	      error_found = true;
	      break;
	    }
	  n = n->same_comdat_group;
	}
      while (n != node);
    }

  /* For analyzed, not-yet-emitted bodies, walk every call statement and
     check it has exactly one matching callgraph edge (using e->aux as a
     visited mark, cleared again below).  */
  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
		 !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		if (is_gimple_call (stmt))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    tree decl = gimple_call_fndecl (stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_gimple_stmt (stmt);
			    error_found = true;
			  }
			if (!e->indirect_unknown_callee)
			  {
			    if (e->callee->same_body_alias)
			      {
				error ("edge points to same body alias:");
				debug_tree (e->callee->decl);
				error_found = true;
			      }
#ifdef ENABLE_CHECKING
			    else if (!e->callee->global.inlined_to
				     && decl
				     && cgraph_get_node (decl)
				     && (e->callee->former_clone_of
					 != cgraph_get_node (decl)->decl)
				     && !clone_of_p (cgraph_node (decl),
						     e->callee))
			      {
				error ("edge points to wrong declaration:");
				debug_tree (e->callee->decl);
				fprintf (stderr," Instead of:");
				debug_tree (decl);
				error_found = true;
			      }
#endif
			  }
			else if (decl)
			  {
			    error ("an indirect edge with unknown callee "
				   "corresponding to a call_stmt with "
				   "a known declaration:");
			    error_found = true;
			    debug_gimple_stmt (e->call_stmt);
			  }
			e->aux = (void *)1;
		      }
		    else if (decl)
		      {
			error ("missing callgraph edge for call stmt:");
			debug_gimple_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Every edge must have been marked by the stmt walk above.
	 Clear the aux marks again.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)),
		     identifier_to_locale (cgraph_node_name (e->callee)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
      for (e = node->indirect_calls; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("an indirect edge from %s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}
732
733/* Verify whole cgraph structure. */
4b987fac 734DEBUG_FUNCTION void
b0cdf642 735verify_cgraph (void)
736{
737 struct cgraph_node *node;
738
852f689e 739 if (seen_error ())
8ec2a798 740 return;
741
b0cdf642 742 for (node = cgraph_nodes; node; node = node->next)
743 verify_cgraph_node (node);
744}
745
56af936e 746/* Output all asm statements we have stored up to be output. */
747
748static void
749cgraph_output_pending_asms (void)
750{
751 struct cgraph_asm_node *can;
752
852f689e 753 if (seen_error ())
56af936e 754 return;
755
756 for (can = cgraph_asm_nodes; can; can = can->next)
757 assemble_asm (can->asm_str);
758 cgraph_asm_nodes = NULL;
759}
760
/* Analyze the function scheduled to be output.  Gimplifies and lowers
   NODE's body (once) and marks the node analyzed.  Saves and restores
   current_function_decl/cfun around the work.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* NOTE: "neeeded" is the spelling of the declared API name; do not
     "fix" it here without renaming the definition too.  */
  assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}
787
05806473 788/* Look for externally_visible and used attributes and mark cgraph nodes
789 accordingly.
790
791 We cannot mark the nodes at the point the attributes are processed (in
792 handle_*_attribute) because the copy of the declarations available at that
793 point may not be canonical. For example, in:
794
795 void f();
796 void f() __attribute__((used));
797
798 the declaration we see in handle_used_attribute will be the second
799 declaration -- but the front end will subsequently merge that declaration
800 with the original declaration and discard the second declaration.
801
802 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
803
804 void f() {}
805 void f() __attribute__((externally_visible));
806
807 is valid.
808
809 So, we walk the nodes at the end of the translation unit, applying the
810 attributes at that point. */
811
812static void
813process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 814 struct varpool_node *first_var)
05806473 815{
816 struct cgraph_node *node;
1d416bd7 817 struct varpool_node *vnode;
05806473 818
819 for (node = cgraph_nodes; node != first; node = node->next)
820 {
821 tree decl = node->decl;
83a23b05 822 if (DECL_PRESERVE_P (decl))
0b49f8f8 823 cgraph_mark_needed_node (node);
05806473 824 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
825 {
ba12ea31 826 if (! TREE_PUBLIC (node->decl))
712d2297 827 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
828 "%<externally_visible%>"
829 " attribute have effect only on public objects");
59dd4830 830 else if (node->local.finalized)
831 cgraph_mark_needed_node (node);
05806473 832 }
833 }
1d416bd7 834 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 835 {
836 tree decl = vnode->decl;
83a23b05 837 if (DECL_PRESERVE_P (decl))
05806473 838 {
22671757 839 vnode->force_output = true;
05806473 840 if (vnode->finalized)
1d416bd7 841 varpool_mark_needed_node (vnode);
05806473 842 }
843 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
844 {
ba12ea31 845 if (! TREE_PUBLIC (vnode->decl))
712d2297 846 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
847 "%<externally_visible%>"
848 " attribute have effect only on public objects");
59dd4830 849 else if (vnode->finalized)
850 varpool_mark_needed_node (vnode);
05806473 851 }
852 }
853}
854
aeeb194b 855/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
856 each reachable functions) and build cgraph.
857 The function can be called multiple times after inserting new nodes
0d424440 858 into beginning of queue. Just the new part of queue is re-scanned then. */
ae01b312 859
aeeb194b 860static void
861cgraph_analyze_functions (void)
ae01b312 862{
c1dcd13c 863 /* Keep track of already processed nodes when called multiple times for
06b27565 864 intermodule optimization. */
c1dcd13c 865 static struct cgraph_node *first_analyzed;
c17d0de1 866 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 867 static struct varpool_node *first_analyzed_var;
aeeb194b 868 struct cgraph_node *node, *next;
ae01b312 869
f1c35659 870 bitmap_obstack_initialize (NULL);
c17d0de1 871 process_function_and_variable_attributes (first_processed,
872 first_analyzed_var);
873 first_processed = cgraph_nodes;
1d416bd7 874 first_analyzed_var = varpool_nodes;
875 varpool_analyze_pending_decls ();
f79b6507 876 if (cgraph_dump_file)
ae01b312 877 {
e4200070 878 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 879 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 880 if (node->needed)
f79b6507 881 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
882 fprintf (cgraph_dump_file, "\n");
ae01b312 883 }
aeeb194b 884 cgraph_process_new_functions ();
ae01b312 885
e6d2b2d8 886 /* Propagate reachability flag and lower representation of all reachable
887 functions. In the future, lowering will introduce new functions and
888 new entry points on the way (by template instantiation and virtual
889 method table generation for instance). */
3d7bfc56 890 while (cgraph_nodes_queue)
ae01b312 891 {
0785e435 892 struct cgraph_edge *edge;
3d7bfc56 893 tree decl = cgraph_nodes_queue->decl;
894
895 node = cgraph_nodes_queue;
d87976fb 896 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 897 node->next_needed = NULL;
ae01b312 898
638531ad 899 /* ??? It is possible to create extern inline function and later using
bbd5cba2 900 weak alias attribute to kill its body. See
638531ad 901 gcc.c-torture/compile/20011119-1.c */
75a70cf9 902 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 903 {
904 cgraph_reset_node (node);
905 continue;
906 }
638531ad 907
7bfefa9d 908 if (!node->analyzed)
909 cgraph_analyze_function (node);
2c0b522d 910
ae01b312 911 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 912 if (!edge->callee->reachable)
2c0b522d 913 cgraph_mark_reachable_node (edge->callee);
914
61c2c7b1 915 if (node->same_comdat_group)
916 {
917 for (next = node->same_comdat_group;
918 next != node;
919 next = next->same_comdat_group)
920 cgraph_mark_reachable_node (next);
921 }
922
d544ceff 923 /* If decl is a clone of an abstract function, mark that abstract
924 function so that we don't release its body. The DECL_INITIAL() of that
925 abstract function declaration will be later needed to output debug info. */
926 if (DECL_ABSTRACT_ORIGIN (decl))
927 {
928 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
929 origin_node->abstract_and_needed = true;
930 }
931
c17d0de1 932 /* We finalize local static variables during constructing callgraph
933 edges. Process their attributes too. */
934 process_function_and_variable_attributes (first_processed,
935 first_analyzed_var);
936 first_processed = cgraph_nodes;
1d416bd7 937 first_analyzed_var = varpool_nodes;
938 varpool_analyze_pending_decls ();
aeeb194b 939 cgraph_process_new_functions ();
ae01b312 940 }
2c0b522d 941
aa5e06c7 942 /* Collect entry points to the unit. */
f79b6507 943 if (cgraph_dump_file)
3d7bfc56 944 {
e4200070 945 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 946 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 947 if (node->needed)
f79b6507 948 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 949 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 950 dump_cgraph (cgraph_dump_file);
3d7bfc56 951 }
e6d2b2d8 952
f79b6507 953 if (cgraph_dump_file)
954 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 955
f4ec5ce1 956 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 957 {
958 tree decl = node->decl;
f4ec5ce1 959 next = node->next;
ae01b312 960
1a1a827a 961 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 962 cgraph_reset_node (node);
9b8fb23a 963
1a1a827a 964 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 965 {
f79b6507 966 if (cgraph_dump_file)
967 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 968 cgraph_remove_node (node);
9b8fb23a 969 continue;
ae01b312 970 }
bc5cab3b 971 else
972 node->next_needed = NULL;
1a1a827a 973 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 974 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 975 }
f79b6507 976 if (cgraph_dump_file)
e4200070 977 {
978 fprintf (cgraph_dump_file, "\n\nReclaimed ");
979 dump_cgraph (cgraph_dump_file);
980 }
f1c35659 981 bitmap_obstack_release (NULL);
c1dcd13c 982 first_analyzed = cgraph_nodes;
ae01b312 983 ggc_collect ();
aeeb194b 984}
985
8f69fd82 986
/* Analyze the whole compilation unit once it is parsed completely.
   This is the main entry point called by the front end after parsing;
   it lowers everything and then drives the pass manager via
   cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Aliases may have been registered during the analysis above; mark
     their targets necessary and emit diagnostics for them too.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
9ed5b1f5 1024
1025
/* Figure out what functions we want to assemble: set NODE->process on
   every node whose body must be emitted.  Functions pulled in via
   same_comdat_group are marked together with their group leader.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for processing before we start.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* A node already marked here can only have been marked through
         its comdat group (see below).  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* Look for at least one caller that did not get inlined; E is
         non-NULL afterwards iff such a caller exists.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* The whole comdat group must be emitted together.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Defer the reclaim check: another member of the group may
	     still mark this node below; re-verify after the loop.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Second pass: any comdat-group member left unmarked must have no
     body to reclaim, same as the `else' branch above.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
1122
28454517 1123/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1124 in lowered gimple form.
1125
1126 Set current_function_decl and cfun to newly constructed empty function body.
1127 return basic block in the function body. */
1128
1129static basic_block
1130init_lowered_empty_function (tree decl)
1131{
1132 basic_block bb;
1133
1134 current_function_decl = decl;
1135 allocate_struct_function (decl, false);
1136 gimple_register_cfg_hooks ();
1137 init_empty_tree_cfg ();
1138 init_tree_ssa (cfun);
1139 init_ssa_operands ();
1140 cfun->gimple_df->in_ssa_p = true;
1141 DECL_INITIAL (decl) = make_node (BLOCK);
1142
1143 DECL_SAVED_TREE (decl) = error_mark_node;
1144 cfun->curr_properties |=
1145 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1146 PROP_ssa);
1147
1148 /* Create BB for body of the function and connect it properly. */
1149 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1150 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1151 make_edge (bb, EXIT_BLOCK_PTR, 0);
1152
1153 return bb;
1154}
1155
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Statements implementing the adjustment are inserted after *BSI; the
   adjusted value is returned in a fresh temporary.  Note the ordering:
   for a this-adjusting thunk the fixed offset is applied BEFORE the
   vcall-offset lookup, for a result-adjusting thunk it is applied
   after.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* this-adjusting thunk: apply the constant offset first.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the global type used for vtable entries
         (pointer to a trivial function type).  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  /* Result-adjusting thunk: apply the constant offset last.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      /* PTR may be a complex expression at this point; force it into
         a VAR_DECL before building the final POINTER_PLUS_EXPR.  */
      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1282
/* Produce assembler for thunk NODE.  Uses the target's output_mi_thunk
   hook when it can handle this thunk; otherwise builds the thunk as a
   lowered GIMPLE function (adjust `this', tail-call the target, and for
   covariant-return thunks adjust the returned pointer with a NULL
   guard) and hands it back to the cgraph for normal expansion.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;	/* Function being thunked to.  */
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Fast path: let the target emit the thunk directly as assembly.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: build the thunk body in GIMPLE.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* All four block variables start out as the single body block;
         THEN/ELSE/RETURN are only split off for the NULL guard below.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate return: reuse the RESULT_DECL as the lhs.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
            restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* First argument is `this'; adjust it here for a this-adjusting
         thunk, pass it through unchanged otherwise.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  /* Result-adjusting thunk: fix up the returned value.  */
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
	      				fold_convert (TREE_TYPE (restmp),
						      integer_zero_node),
	      			        NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
			         fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      /* NULL result is passed through unadjusted.  */
	      gimple stmt;
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
								integer_zero_node));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
	/* No result adjustment needed; the call can be a tail call.  */
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* The thunk is no longer an alias of NODE; it becomes a real
         function body that will be expanded normally.  */
      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1460
/* Expand function specified by NODE: generate RTL/assembly for its
   body, emit its same-body aliases and thunks, then release the GIMPLE
   body and outgoing call edges.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      /* assemble_thunk may clobber the alias flag via
	 cgraph_remove_same_body_alias; save and restore it.
	 NOTE(review): presumably that is why this save exists —
	 confirm against cgraph_remove_same_body_alias.  */
      bool saved_alias = node->alias;
      /* Find the tail of the same_body list ...  */
      for (alias = node->same_body;
      	   alias && alias->next; alias = alias->next)
        ;
      /* Walk aliases in the order they were created; it is possible that
         thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
        {
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
    }
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1510
b0cdf642 1511/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1512
1513bool
326a9581 1514cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1515{
b0cdf642 1516 *reason = e->inline_failed;
1517 return !e->inline_failed;
d7c6d889 1518}
b0cdf642 1519
acc70efa 1520
acc70efa 1521
d9d9733a 1522/* Expand all functions that must be output.
1523
d7c6d889 1524 Attempt to topologically sort the nodes so function is output when
1525 all called functions are already assembled to allow data to be
91c82c20 1526 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1527 between a function and its callees (later we may choose to use a more
d7c6d889 1528 sophisticated algorithm for function reordering; we will likely want
1529 to use subsections to make the output functions appear in top-down
1530 order). */
1531
1532static void
a6868229 1533cgraph_expand_all_functions (void)
d7c6d889 1534{
1535 struct cgraph_node *node;
4c36ffe6 1536 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1537 int order_pos, new_order_pos = 0;
d7c6d889 1538 int i;
1539
d7c6d889 1540 order_pos = cgraph_postorder (order);
cc636d56 1541 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1542
7bd28bba 1543 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1544 optimization. So we must be sure to not reference them. */
1545 for (i = 0; i < order_pos; i++)
09fc9532 1546 if (order[i]->process)
b0cdf642 1547 order[new_order_pos++] = order[i];
1548
1549 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1550 {
1551 node = order[i];
09fc9532 1552 if (node->process)
d7c6d889 1553 {
cc636d56 1554 gcc_assert (node->reachable);
09fc9532 1555 node->process = 0;
d7c6d889 1556 cgraph_expand_function (node);
1557 }
1558 }
523c1122 1559 cgraph_process_new_functions ();
773c5ba7 1560
d7c6d889 1561 free (order);
773c5ba7 1562
d7c6d889 1563}
1564
/* This is used to sort the node types by the cgraph order number.  */

/* Discriminator telling which member of cgraph_order_sort's union is
   active for a given slot.  */
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot unused: no node has this order number.  */
  ORDER_FUNCTION,	/* u.f is valid.  */
  ORDER_VAR,		/* u.v is valid.  */
  ORDER_ASM		/* u.a is valid.  */
};

/* One slot of the output-order table built by cgraph_output_in_order;
   a tagged union over the three kinds of top-level entities.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Function to expand.  */
    struct varpool_node *v;	/* Variable to assemble.  */
    struct cgraph_asm_node *a;	/* Top-level asm statement.  */
  } u;
};
1585
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Order numbers are globally unique, so a table indexed by order
     number holds at most one entity per slot.  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Record functions marked for output ...  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* ... all queued variables ...  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* ... and top-level asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* With -fno-toplevel-reorder we output all statics; mark every
     recorded variable as needed before draining the queue.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1672
b0cdf642 1673/* Return true when function body of DECL still needs to be kept around
1674 for later re-use. */
1675bool
1676cgraph_preserve_function_body_p (tree decl)
1677{
1678 struct cgraph_node *node;
8d8c4c8d 1679
1680 gcc_assert (cgraph_global_info_ready);
b0cdf642 1681 /* Look if there is any clone around. */
ccf4ab6b 1682 node = cgraph_node (decl);
1683 if (node->clones)
1684 return true;
b0cdf642 1685 return false;
1686}
1687
/* Run the interprocedural passes: small IPA passes, summary
   generation, LTO streaming and the regular IPA passes, with plugin
   callbacks around the whole sequence.  Behavior differs depending on
   whether we are reading LTO input (in_lto_p), generating LTO output
   (flag_generate_lto) or running an LTO link-time partition
   (flag_ltrans).  */

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Small IPA passes were already run when the IL was streamed out.  */
  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In an ltrans unit the regular IPA passes already ran at
     compile/WPA time.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1738
34e5cced 1739
/* Perform simple optimizations based on callgraph.  This is the main
   driver: run the IPA passes, decide what to output, then expand and
   assemble everything.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
34e5cced 1847
121f3051 1848void
1849init_cgraph (void)
1850{
01ec0a6c 1851 if (!cgraph_dump_file)
1852 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 1853}
b5d36404 1854
a0c938f0 1855/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1856 fixed by cgraph_function_versioning (), now the call_expr in their
1857 respective tree code should be updated to call the NEW_VERSION. */
1858
1859static void
1860update_call_expr (struct cgraph_node *new_version)
1861{
1862 struct cgraph_edge *e;
1863
1864 gcc_assert (new_version);
75a70cf9 1865
1866 /* Update the call expr on the edges to call the new version. */
b5d36404 1867 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1868 {
1869 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1870 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1871 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1872 }
b5d36404 1873}
1874
1875
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL BLOCK_TO_COPY determine what basic blocks
   was copied to prevent duplications of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
 {
   struct cgraph_node *new_version;
   struct cgraph_edge *e;
   unsigned i;

   gcc_assert (old_version);

   new_version = cgraph_node (new_decl);

   /* Copy the flags/info of the old node; the clone is a local,
      not externally visible function.  */
   new_version->analyzed = true;
   new_version->local = old_version->local;
   new_version->local.externally_visible = false;
   new_version->local.local = true;
   new_version->local.vtable_method = false;
   new_version->global = old_version->global;
   new_version->rtl = old_version->rtl;
   new_version->reachable = true;
   new_version->count = old_version->count;

   /* Clone direct call edges, skipping calls in basic blocks that are
      not copied into the clone (they would be dead there).  */
   for (e = old_version->callees; e; e=e->next_callee)
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
       cgraph_clone_edge (e, new_version, e->call_stmt,
			  e->lto_stmt_uid, REG_BR_PROB_BASE,
			  CGRAPH_FREQ_BASE,
			  e->loop_nest, true);
   /* Likewise for indirect call edges.  */
   for (e = old_version->indirect_calls; e; e=e->next_callee)
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
       cgraph_clone_edge (e, new_version, e->call_stmt,
			  e->lto_stmt_uid, REG_BR_PROB_BASE,
			  CGRAPH_FREQ_BASE,
			  e->loop_nest, true);
   FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
     {
       /* Redirect calls to the old version node to point to its new
	  version.  */
       cgraph_redirect_edge_callee (e, new_version);
     }

   return new_version;
 }
1934
1935 /* Perform function versioning.
a0c938f0 1936 Function versioning includes copying of the tree and
b5d36404 1937 a callgraph update (creating a new cgraph node and updating
1938 its callees and callers).
1939
1940 REDIRECT_CALLERS varray includes the edges to be redirected
1941 to the new version.
1942
1943 TREE_MAP is a mapping of tree nodes we want to replace with
1944 new ones (according to results of prior analysis).
1945 OLD_VERSION_NODE is the node that is versioned.
48e1416a 1946 It returns the new version's cgraph node.
b06ab5fa 1947 If non-NULL ARGS_TO_SKIP determine function parameters to remove
1948 from new version.
1949 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
1950 If non_NULL NEW_ENTRY determine new entry BB of the clone. */
b5d36404 1951
1952struct cgraph_node *
1953cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 1954 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 1955 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 1956 bitmap args_to_skip,
b06ab5fa 1957 bitmap bbs_to_copy,
1958 basic_block new_entry_block,
a70a5e2c 1959 const char *clone_name)
b5d36404 1960{
1961 tree old_decl = old_version_node->decl;
1962 struct cgraph_node *new_version_node = NULL;
1963 tree new_decl;
1964
1965 if (!tree_versionable_function_p (old_decl))
1966 return NULL;
1967
1968 /* Make a new FUNCTION_DECL tree node for the
1969 new version. */
5afe38fe 1970 if (!args_to_skip)
1971 new_decl = copy_node (old_decl);
1972 else
1973 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 1974
df0b8dfb 1975 /* Generate a new name for the new version. */
1976 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
1977 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1978 SET_DECL_RTL (new_decl, NULL);
1979
b5d36404 1980 /* Create the new version's call-graph node.
1981 and update the edges of the new node. */
1982 new_version_node =
1983 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 1984 redirect_callers, bbs_to_copy);
b5d36404 1985
1986 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 1987 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
1988 bbs_to_copy, new_entry_block);
b5d36404 1989
a0c938f0 1990 /* Update the new version's properties.
e03a95e7 1991 Make The new version visible only within this translation unit. Make sure
1992 that is not weak also.
a0c938f0 1993 ??? We cannot use COMDAT linkage because there is no
b5d36404 1994 ABI support for this. */
6137cc9f 1995 cgraph_make_decl_local (new_version_node->decl);
f014e39d 1996 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 1997 new_version_node->local.externally_visible = 0;
1998 new_version_node->local.local = 1;
1999 new_version_node->lowered = true;
f014e39d 2000
e03a95e7 2001 /* Update the call_expr on the edges to call the new version node. */
2002 update_call_expr (new_version_node);
48e1416a 2003
50828ed8 2004 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2005 return new_version_node;
2006}
469679ab 2007
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.

   Works by donating NODE's body to its first clone: the first clone
   gets a copy of NODE's decl, is made the new clone_of parent of all
   remaining clones, and receives a private copy of the function body.
   Returns the (former first) clone that now owns the offline body.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give the first clone its own decl and register it in the hash table
     so lookups by decl find the clone from now on.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent all remaining sibling clones of NODE under
	 FIRST_CLONE, splicing them onto the front of FIRST_CLONE's own
	 clone list.  The loop stops at the last sibling so N still
	 points at it for the list splice below.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  /* FIRST_CLONE is now a root of its own clone tree; NODE keeps no
     clones.  */
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Walk the whole clone tree below FIRST_CLONE (depth-first, using
     clone_of links to back up) and retarget every clone that still
     referenced NODE's decl to FIRST_CLONE's new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
			    NULL, NULL);

  /* The offline copy is a private, non-COMDAT definition of this
     translation unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  /* Pending IPA transforms applied to NODE do not carry over to the
     saved body.  */
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
a861fe52 2075
ccf4ab6b 2076/* Given virtual clone, turn it into actual clone. */
2077static void
2078cgraph_materialize_clone (struct cgraph_node *node)
2079{
2080 bitmap_obstack_initialize (NULL);
e748b31d 2081#ifdef ENABLE_CHECKING
2082 node->former_clone_of = node->clone_of->decl;
2083 if (node->clone_of->former_clone_of)
2084 node->former_clone_of = node->clone_of->former_clone_of;
2085#endif
ccf4ab6b 2086 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2087 tree_function_versioning (node->clone_of->decl, node->decl,
2088 node->clone.tree_map, true,
b06ab5fa 2089 node->clone.args_to_skip, NULL, NULL);
e20422ea 2090 if (cgraph_dump_file)
2091 {
2092 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2093 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2094 }
ccf4ab6b 2095
2096 /* Function is no longer clone. */
2097 if (node->next_sibling_clone)
2098 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2099 if (node->prev_sibling_clone)
2100 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2101 else
2102 node->clone_of->clones = node->next_sibling_clone;
2103 node->next_sibling_clone = NULL;
2104 node->prev_sibling_clone = NULL;
6d1cc52c 2105 if (!node->clone_of->analyzed && !node->clone_of->clones)
7d6a1ec8 2106 {
2107 cgraph_release_function_body (node->clone_of);
2108 cgraph_node_remove_callees (node->clone_of);
2109 ipa_remove_all_references (&node->clone_of->ref_list);
2110 }
ccf4ab6b 2111 node->clone_of = NULL;
2112 bitmap_obstack_release (NULL);
2113}
2114
c596d830 2115/* If necessary, change the function declaration in the call statement
2116 associated with E so that it corresponds to the edge callee. */
2117
2118gimple
2119cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2120{
2121 tree decl = gimple_call_fndecl (e->call_stmt);
2122 gimple new_stmt;
1f449108 2123#ifdef ENABLE_CHECKING
2124 struct cgraph_node *node;
2125#endif
c596d830 2126
1caef38b 2127 if (e->indirect_unknown_callee
2128 || decl == e->callee->decl
c596d830 2129 /* Don't update call from same body alias to the real function. */
1caef38b 2130 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
c596d830 2131 return e->call_stmt;
2132
1f449108 2133#ifdef ENABLE_CHECKING
1caef38b 2134 if (decl)
2135 {
2136 node = cgraph_get_node (decl);
2137 gcc_assert (!node || !node->clone.combined_args_to_skip);
2138 }
1f449108 2139#endif
e748b31d 2140
c596d830 2141 if (cgraph_dump_file)
2142 {
2143 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2144 cgraph_node_name (e->caller), e->caller->uid,
2145 cgraph_node_name (e->callee), e->callee->uid);
2146 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
e748b31d 2147 if (e->callee->clone.combined_args_to_skip)
91aba934 2148 {
2149 fprintf (cgraph_dump_file, " combined args to skip: ");
2150 dump_bitmap (cgraph_dump_file,
2151 e->callee->clone.combined_args_to_skip);
e748b31d 2152 }
c596d830 2153 }
2154
2155 if (e->callee->clone.combined_args_to_skip)
91aba934 2156 {
2157 gimple_stmt_iterator gsi;
2158
2159 new_stmt
2160 = gimple_call_copy_skip_args (e->call_stmt,
2161 e->callee->clone.combined_args_to_skip);
75c7f5a5 2162 gimple_call_set_fndecl (new_stmt, e->callee->decl);
91aba934 2163
2164 if (gimple_vdef (new_stmt)
2165 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2166 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2167
2168 gsi = gsi_for_stmt (e->call_stmt);
2169 gsi_replace (&gsi, new_stmt, true);
2170 }
c596d830 2171 else
75c7f5a5 2172 {
2173 new_stmt = e->call_stmt;
2174 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2175 update_stmt (new_stmt);
2176 }
c596d830 2177
c596d830 2178 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2179
2180 if (cgraph_dump_file)
2181 {
2182 fprintf (cgraph_dump_file, " updated to:");
2183 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2184 }
2185 return new_stmt;
2186}
2187
ccf4ab6b 2188/* Once all functions from compilation unit are in memory, produce all clones
c596d830 2189 and update all calls. We might also do this on demand if we don't want to
2190 bring all functions to memory prior compilation, but current WHOPR
2191 implementation does that and it is is bit easier to keep everything right in
2192 this order. */
ccf4ab6b 2193void
2194cgraph_materialize_all_clones (void)
2195{
2196 struct cgraph_node *node;
2197 bool stabilized = false;
2198
2199 if (cgraph_dump_file)
2200 fprintf (cgraph_dump_file, "Materializing clones\n");
2201#ifdef ENABLE_CHECKING
2202 verify_cgraph ();
2203#endif
2204
2205 /* We can also do topological order, but number of iterations should be
2206 bounded by number of IPA passes since single IPA pass is probably not
2207 going to create clones of clones it created itself. */
2208 while (!stabilized)
2209 {
2210 stabilized = true;
2211 for (node = cgraph_nodes; node; node = node->next)
2212 {
2213 if (node->clone_of && node->decl != node->clone_of->decl
2214 && !gimple_has_body_p (node->decl))
2215 {
2216 if (gimple_has_body_p (node->clone_of->decl))
2217 {
2218 if (cgraph_dump_file)
e20422ea 2219 {
2220 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2221 cgraph_node_name (node->clone_of),
2222 cgraph_node_name (node));
2223 if (node->clone.tree_map)
2224 {
2225 unsigned int i;
2226 fprintf (cgraph_dump_file, " replace map: ");
2227 for (i = 0; i < VEC_length (ipa_replace_map_p,
2228 node->clone.tree_map);
2229 i++)
2230 {
2231 struct ipa_replace_map *replace_info;
2232 replace_info = VEC_index (ipa_replace_map_p,
2233 node->clone.tree_map,
2234 i);
2235 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2236 fprintf (cgraph_dump_file, " -> ");
2237 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2238 fprintf (cgraph_dump_file, "%s%s;",
2239 replace_info->replace_p ? "(replace)":"",
2240 replace_info->ref_p ? "(ref)":"");
2241 }
2242 fprintf (cgraph_dump_file, "\n");
2243 }
2244 if (node->clone.args_to_skip)
2245 {
2246 fprintf (cgraph_dump_file, " args_to_skip: ");
2247 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2248 }
2249 if (node->clone.args_to_skip)
2250 {
2251 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2252 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2253 }
2254 }
ccf4ab6b 2255 cgraph_materialize_clone (node);
a510bd8d 2256 stabilized = false;
ccf4ab6b 2257 }
ccf4ab6b 2258 }
2259 }
2260 }
ee3f5fc0 2261 for (node = cgraph_nodes; node; node = node->next)
2262 if (!node->analyzed && node->callees)
2263 cgraph_node_remove_callees (node);
c596d830 2264 if (cgraph_dump_file)
2265 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2266#ifdef ENABLE_CHECKING
2267 verify_cgraph ();
2268#endif
ccf4ab6b 2269 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2270}
2271
a861fe52 2272#include "gt-cgraphunit.h"