]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cgraphunit.c
implement method attributes.
[thirdparty/gcc.git] / gcc / cgraphunit.c
CommitLineData
cd6bca02 1/* Callgraph based interprocedural optimizations.
028a99ef 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
e3a37aef 3 Free Software Foundation, Inc.
ae01b312 4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
8c4c00c1 10Software Foundation; either version 3, or (at your option) any later
ae01b312 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
8c4c00c1 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
ae01b312 21
b0cdf642 22/* This module implements main driver of compilation process as well as
cd6bca02 23 few basic interprocedural optimizers.
b0cdf642 24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
b326746d 35 (There is one exception needed for implementing GCC extern inline
36 function.)
b0cdf642 37
1d416bd7 38 - varpool_finalize_variable
b0cdf642 39
7bd28bba 40 This function has same behavior as the above but is used for static
b0cdf642 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
b326746d 45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
b0cdf642 47
6329636b 48 In the the call-graph construction and local function
b0cdf642 49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
b326746d 52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
b0cdf642 54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
b0cdf642 62 - cgraph_mark_needed_node
1d416bd7 63 - varpool_mark_needed_node
b0cdf642 64
b326746d 65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
ccd2f3d1 69 used by C++ frontend to explicitly mark the keyed methods.
b0cdf642 70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
6329636b 80 Analyzing of all functions is deferred
b0cdf642 81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
ca67a72b 91 The intra-procedural information is produced and its existence
b0cdf642 92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that has not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
6329636b 106 where reference has been optimized out. */
121f3051 107
acc70efa 108
ae01b312 109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
112#include "tm.h"
113#include "tree.h"
b5530559 114#include "rtl.h"
acc70efa 115#include "tree-flow.h"
ae01b312 116#include "tree-inline.h"
117#include "langhooks.h"
c6224531 118#include "pointer-set.h"
ae01b312 119#include "toplev.h"
120#include "flags.h"
121#include "ggc.h"
122#include "debug.h"
123#include "target.h"
124#include "cgraph.h"
80a85d8a 125#include "diagnostic.h"
ce084dfc 126#include "tree-pretty-print.h"
127#include "gimple-pretty-print.h"
f79b6507 128#include "timevar.h"
d7c6d889 129#include "params.h"
130#include "fibheap.h"
611e5405 131#include "intl.h"
b69eb0ff 132#include "function.h"
b5d36404 133#include "ipa-prop.h"
75a70cf9 134#include "gimple.h"
135#include "tree-iterator.h"
f1e2a033 136#include "tree-pass.h"
bfec3452 137#include "tree-dump.h"
c1dcd13c 138#include "output.h"
9ed5b1f5 139#include "coverage.h"
c9036234 140#include "plugin.h"
d7c6d889 141
a6868229 142static void cgraph_expand_all_functions (void);
d9d9733a 143static void cgraph_mark_functions_to_output (void);
144static void cgraph_expand_function (struct cgraph_node *);
f788fff2 145static void cgraph_output_pending_asms (void);
bfec3452 146static void cgraph_analyze_function (struct cgraph_node *);
25bb88de 147
ecb08119 148FILE *cgraph_dump_file;
121f3051 149
28454517 150/* Used for vtable lookup in thunk adjusting. */
151static GTY (()) tree vtable_entry_type;
152
/* Determine if function DECL (whose callgraph node is NODE) is needed.
   That is, visible to something either outside this translation unit or
   something magic in the system configury.

   Returns true when the function must be kept and output; returns false
   when it is only needed if something else references it.  Note this is a
   pure predicate: it does not mark NODE itself (callers such as
   cgraph_finalize_function do that via cgraph_mark_needed_node).  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  always_inline functions are excluded since
     they are meant to disappear into their callers.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
     return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.

     Under -fwhole-program/-flto/-fwhopr the whole-program analysis gets
     to decide visibility later, so nothing is forced needed here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize
	    && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !(DECL_CONTEXT (decl)
		 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
       && !flag_whole_program
       && !flag_lto
       && !flag_whopr)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  return false;
}
207
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   What "catching up" is required depends on how far compilation has
   progressed (cgraph_state).  Returns true if any function was queued for
   output during construction.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  varpool_analyze_pending_decls ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      /* Pop NODE off the list before processing; the handlers below may
	 push further new nodes.  */
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  /* The early local passes and inline-parameter computation need
	     cfun/current_function_decl set up; restore them afterwards.  */
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Let IPA passes that registered insertion hooks see the node.  */
      cgraph_call_function_insertion_hooks (node);
      varpool_analyze_pending_decls ();
    }
  return output;
}
279
9b8fb23a 280/* As an GCC extension we allow redefinition of the function. The
281 semantics when both copies of bodies differ is not well defined.
282 We replace the old body with new body so in unit at a time mode
283 we always use new body, while in normal mode we may end up with
284 old body inlined into some functions and new body expanded and
285 inlined in others.
286
287 ??? It may make more sense to use one body for inlining and other
288 body for expanding the function but this is difficult to do. */
289
290static void
291cgraph_reset_node (struct cgraph_node *node)
292{
09fc9532 293 /* If node->process is set, then we have already begun whole-unit analysis.
6329636b 294 This is *not* testing for whether we've already emitted the function.
295 That case can be sort-of legitimately seen with real function redefinition
296 errors. I would argue that the front end should never present us with
297 such a case, but don't enforce that for now. */
09fc9532 298 gcc_assert (!node->process);
9b8fb23a 299
300 /* Reset our data structures so we can analyze the function again. */
301 memset (&node->local, 0, sizeof (node->local));
302 memset (&node->global, 0, sizeof (node->global));
303 memset (&node->rtl, 0, sizeof (node->rtl));
304 node->analyzed = false;
305 node->local.redefined_extern_inline = true;
306 node->local.finalized = false;
307
9b8fb23a 308 cgraph_node_remove_callees (node);
309
310 /* We may need to re-queue the node for assembling in case
46beef9a 311 we already proceeded it and ignored as not needed or got
312 a re-declaration in IMA mode. */
313 if (node->reachable)
9b8fb23a 314 {
315 struct cgraph_node *n;
316
317 for (n = cgraph_nodes_queue; n; n = n->next_needed)
318 if (n == node)
319 break;
320 if (!n)
321 node->reachable = 0;
322 }
323}
c08871a9 324
1e8e9920 325static void
326cgraph_lower_function (struct cgraph_node *node)
327{
328 if (node->lowered)
329 return;
bfec3452 330
331 if (node->nested)
332 lower_nested_functions (node->decl);
333 gcc_assert (!node->nested);
334
1e8e9920 335 tree_lowering_passes (node->decl);
336 node->lowered = true;
337}
338
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* Redefinition of an already-finalized function (GCC extern inline
     extension / IMA): throw away the old analysis state first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A front end that already built a CFG has effectively lowered the body.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || DECL_STATIC_CONSTRUCTOR (decl)
      || DECL_STATIC_DESTRUCTOR (decl)
      /* COMDAT virtual functions may be referenced by vtable from
	 other compilation unit.  Still we want to devirtualize calls
	 to those so we need to analyze them.
	 FIXME: We should introduce may edges for this purpose and update
	 their handling in unreachable function removal and inliner too.  */
      || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}
386
0da03d11 387/* C99 extern inline keywords allow changing of declaration after function
388 has been finalized. We need to re-decide if we want to mark the function as
389 needed then. */
390
391void
392cgraph_mark_if_needed (tree decl)
393{
394 struct cgraph_node *node = cgraph_node (decl);
7bfefa9d 395 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
0da03d11 396 cgraph_mark_needed_node (node);
397}
398
#ifdef ENABLE_CHECKING
/* Return TRUE if NODE2 is equivalent to NODE or its clone (walking the
   clone_of chain upward from NODE2).  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  for (; node2 != NULL; node2 = node2->clone_of)
    if (node2 == node)
      return true;
  return false;
}
#endif
ccf4ab6b 409
/* Verify edge E count and frequency.  Emits diagnostics via error () and
   returns true if any problem was found (caller accumulates the flag).  */

static bool
verify_edge_count_and_frequency (struct cgraph_edge *e)
{
  bool error_found = false;
  if (e->count < 0)
    {
      error ("caller edge count is negative");
      error_found = true;
    }
  if (e->frequency < 0)
    {
      error ("caller edge frequency is negative");
      error_found = true;
    }
  if (e->frequency > CGRAPH_FREQ_MAX)
    {
      error ("caller edge frequency is too large");
      error_found = true;
    }
  /* When the caller still has a body and is not an inline clone, the edge
     frequency must agree with the frequency recomputed from the basic
     block containing the call statement.  */
  if (gimple_has_body_p (e->caller->decl)
      && !e->caller->global.inlined_to
      && (e->frequency
	  != compute_call_stmt_bb_frequency (e->caller->decl,
					     gimple_bb (e->call_stmt))))
    {
      error ("caller edge frequency %i does not match BB freqency %i",
	     e->frequency,
	     compute_call_stmt_bb_frequency (e->caller->decl,
					     gimple_bb (e->call_stmt)));
      error_found = true;
    }
  return error_found;
}
445
/* Verify cgraph nodes of given cgraph node.  Checks edge lists, inline
   clone invariants, the clone tree, comdat groups, and (when a body is
   available) the 1:1 correspondence between call statements and callgraph
   edges.  On any inconsistency the node is dumped and an internal error
   is raised.  */
DEBUG_FUNCTION void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* Do not verify an already-broken compilation.  */
  if (seen_error ())
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  /* The aux fields of all callee edges must start out clear; they are used
     below as "seen" marks while walking the body.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       identifier_to_locale (cgraph_node_name (e->caller)),
	       identifier_to_locale (cgraph_node_name (e->callee)));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  /* Inline clones must be private, not address-taken and not needed.  */
  if (node->global.inlined_to && node->local.externally_visible)
    {
      error ("Externally visible inline clone");
      error_found = true;
    }
  if (node->global.inlined_to && node->address_taken)
    {
      error ("Inline clone with address taken");
      error_found = true;
    }
  if (node->global.inlined_to && node->needed)
    {
      error ("Inline clone is needed");
      error_found = true;
    }
  /* Indirect edges must be flagged as such and carry indirect_info.  */
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      if (e->aux)
	{
	  error ("aux field set for indirect edge from %s",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  error_found = true;
	}
      if (!e->indirect_unknown_callee
	  || !e->indirect_info)
	{
	  error ("An indirect edge from %s is not marked as indirect or has "
		 "associated indirect_info, the corresponding statement is: ",
		 identifier_to_locale (cgraph_node_name (e->caller)));
	  debug_gimple_stmt (e->call_stmt);
	  error_found = true;
	}
    }
  /* Check caller edges and the inlined_to invariants: an inlined node has
     exactly one caller and points to the root of the inline tree.  */
  for (e = node->callers; e; e = e->next_caller)
    {
      if (verify_edge_count_and_frequency (e))
	error_found = true;
      if (!e->inline_failed)
	{
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    if (verify_edge_count_and_frequency (e))
      error_found = true;
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* Verify the clone tree: clone_of / clones / sibling links must form a
     consistent doubly-linked structure.  */
  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
	if (n == node)
	  break;
      if (!n)
	{
	  error ("node has wrong clone_of");
	  error_found = true;
	}
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
	if (n->clone_of != node)
	  break;
      if (n)
	{
	  error ("node has wrong clone list");
	  error_found = true;
	}
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }
  /* same_comdat_group must be a circular list of DECL_ONE_ONLY nodes with
     more than one member.  */
  if (node->same_comdat_group)
    {
      struct cgraph_node *n = node->same_comdat_group;

      if (!DECL_ONE_ONLY (node->decl))
	{
	  error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
	  error_found = true;
	}
      if (n == node)
	{
	  error ("node is alone in a comdat group");
	  error_found = true;
	}
      do
	{
	  if (!n->same_comdat_group)
	    {
	      error ("same_comdat_group is not a circular list");
	      error_found = true;
	      break;
	    }
	  n = n->same_comdat_group;
	}
      while (n != node);
    }

  /* If a GIMPLE body is available (and we are not in WPA mode, where
     bodies live in other partitions), walk every call statement and match
     it against the edge lists, using e->aux as the "seen" mark.  */
  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
		 !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		if (is_gimple_call (stmt))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    tree decl = gimple_call_fndecl (stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_gimple_stmt (stmt);
			    error_found = true;
			  }
			if (!e->indirect_unknown_callee)
			  {
			    if (e->callee->same_body_alias)
			      {
				error ("edge points to same body alias:");
				debug_tree (e->callee->decl);
				error_found = true;
			      }
#ifdef ENABLE_CHECKING
			    else if (!e->callee->global.inlined_to
				     && decl
				     && cgraph_get_node (decl)
				     && (e->callee->former_clone_of
					 != cgraph_get_node (decl)->decl)
				     && !clone_of_p (cgraph_node (decl),
						     e->callee))
			      {
				error ("edge points to wrong declaration:");
				debug_tree (e->callee->decl);
				fprintf (stderr," Instead of:");
				debug_tree (decl);
				error_found = true;
			      }
#endif
			  }
			else if (decl)
			  {
			    error ("an indirect edge with unknown callee "
				   "corresponding to a call_stmt with "
				   "a known declaration:");
			    error_found = true;
			    debug_gimple_stmt (e->call_stmt);
			  }
			e->aux = (void *)1;
		      }
		    else if (decl)
		      {
			error ("missing callgraph edge for call stmt:");
			debug_gimple_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Any edge whose aux mark was never set has no matching call
	 statement in the body; also clear the marks for the next run.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)),
		     identifier_to_locale (cgraph_node_name (e->callee)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
      for (e = node->indirect_calls; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("an indirect edge from %s has no corresponding call_stmt",
		     identifier_to_locale (cgraph_node_name (e->caller)));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}
732
733/* Verify whole cgraph structure. */
4b987fac 734DEBUG_FUNCTION void
b0cdf642 735verify_cgraph (void)
736{
737 struct cgraph_node *node;
738
852f689e 739 if (seen_error ())
8ec2a798 740 return;
741
b0cdf642 742 for (node = cgraph_nodes; node; node = node->next)
743 verify_cgraph_node (node);
744}
745
56af936e 746/* Output all asm statements we have stored up to be output. */
747
748static void
749cgraph_output_pending_asms (void)
750{
751 struct cgraph_asm_node *can;
752
852f689e 753 if (seen_error ())
56af936e 754 return;
755
756 for (can = cgraph_asm_nodes; can; can = can->next)
757 assemble_asm (can->asm_str);
758 cgraph_asm_nodes = NULL;
759}
760
/* Analyze the function scheduled to be output: ensure it has an assembler
   name, gimplify and lower the body, and flag the node as analyzed.
   Saves and restores current_function_decl/cfun around the work.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* NOTE: the triple-e spelling is the historical name of this helper.  */
  assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}
787
05806473 788/* Look for externally_visible and used attributes and mark cgraph nodes
789 accordingly.
790
791 We cannot mark the nodes at the point the attributes are processed (in
792 handle_*_attribute) because the copy of the declarations available at that
793 point may not be canonical. For example, in:
794
795 void f();
796 void f() __attribute__((used));
797
798 the declaration we see in handle_used_attribute will be the second
799 declaration -- but the front end will subsequently merge that declaration
800 with the original declaration and discard the second declaration.
801
802 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
803
804 void f() {}
805 void f() __attribute__((externally_visible));
806
807 is valid.
808
809 So, we walk the nodes at the end of the translation unit, applying the
810 attributes at that point. */
811
812static void
813process_function_and_variable_attributes (struct cgraph_node *first,
1d416bd7 814 struct varpool_node *first_var)
05806473 815{
816 struct cgraph_node *node;
1d416bd7 817 struct varpool_node *vnode;
05806473 818
819 for (node = cgraph_nodes; node != first; node = node->next)
820 {
821 tree decl = node->decl;
83a23b05 822 if (DECL_PRESERVE_P (decl))
0b49f8f8 823 cgraph_mark_needed_node (node);
05806473 824 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
825 {
ba12ea31 826 if (! TREE_PUBLIC (node->decl))
712d2297 827 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
828 "%<externally_visible%>"
829 " attribute have effect only on public objects");
59dd4830 830 else if (node->local.finalized)
831 cgraph_mark_needed_node (node);
05806473 832 }
833 }
1d416bd7 834 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
05806473 835 {
836 tree decl = vnode->decl;
83a23b05 837 if (DECL_PRESERVE_P (decl))
05806473 838 {
22671757 839 vnode->force_output = true;
05806473 840 if (vnode->finalized)
1d416bd7 841 varpool_mark_needed_node (vnode);
05806473 842 }
843 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
844 {
ba12ea31 845 if (! TREE_PUBLIC (vnode->decl))
712d2297 846 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
847 "%<externally_visible%>"
848 " attribute have effect only on public objects");
59dd4830 849 else if (vnode->finalized)
850 varpool_mark_needed_node (vnode);
05806473 851 }
852 }
853}
854
aeeb194b 855/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
856 each reachable functions) and build cgraph.
857 The function can be called multiple times after inserting new nodes
0d424440 858 into beginning of queue. Just the new part of queue is re-scanned then. */
ae01b312 859
aeeb194b 860static void
861cgraph_analyze_functions (void)
ae01b312 862{
c1dcd13c 863 /* Keep track of already processed nodes when called multiple times for
06b27565 864 intermodule optimization. */
c1dcd13c 865 static struct cgraph_node *first_analyzed;
c17d0de1 866 struct cgraph_node *first_processed = first_analyzed;
1d416bd7 867 static struct varpool_node *first_analyzed_var;
aeeb194b 868 struct cgraph_node *node, *next;
ae01b312 869
f1c35659 870 bitmap_obstack_initialize (NULL);
c17d0de1 871 process_function_and_variable_attributes (first_processed,
872 first_analyzed_var);
873 first_processed = cgraph_nodes;
1d416bd7 874 first_analyzed_var = varpool_nodes;
875 varpool_analyze_pending_decls ();
f79b6507 876 if (cgraph_dump_file)
ae01b312 877 {
e4200070 878 fprintf (cgraph_dump_file, "Initial entry points:");
c1dcd13c 879 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 880 if (node->needed)
f79b6507 881 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
882 fprintf (cgraph_dump_file, "\n");
ae01b312 883 }
aeeb194b 884 cgraph_process_new_functions ();
ae01b312 885
e6d2b2d8 886 /* Propagate reachability flag and lower representation of all reachable
887 functions. In the future, lowering will introduce new functions and
888 new entry points on the way (by template instantiation and virtual
889 method table generation for instance). */
3d7bfc56 890 while (cgraph_nodes_queue)
ae01b312 891 {
0785e435 892 struct cgraph_edge *edge;
3d7bfc56 893 tree decl = cgraph_nodes_queue->decl;
894
895 node = cgraph_nodes_queue;
d87976fb 896 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
b0cdf642 897 node->next_needed = NULL;
ae01b312 898
638531ad 899 /* ??? It is possible to create extern inline function and later using
bbd5cba2 900 weak alias attribute to kill its body. See
638531ad 901 gcc.c-torture/compile/20011119-1.c */
75a70cf9 902 if (!DECL_STRUCT_FUNCTION (decl))
9b8fb23a 903 {
904 cgraph_reset_node (node);
905 continue;
906 }
638531ad 907
7bfefa9d 908 if (!node->analyzed)
909 cgraph_analyze_function (node);
2c0b522d 910
ae01b312 911 for (edge = node->callees; edge; edge = edge->next_callee)
0785e435 912 if (!edge->callee->reachable)
2c0b522d 913 cgraph_mark_reachable_node (edge->callee);
914
61c2c7b1 915 if (node->same_comdat_group)
916 {
917 for (next = node->same_comdat_group;
918 next != node;
919 next = next->same_comdat_group)
920 cgraph_mark_reachable_node (next);
921 }
922
d544ceff 923 /* If decl is a clone of an abstract function, mark that abstract
924 function so that we don't release its body. The DECL_INITIAL() of that
925 abstract function declaration will be later needed to output debug info. */
926 if (DECL_ABSTRACT_ORIGIN (decl))
927 {
928 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
929 origin_node->abstract_and_needed = true;
930 }
931
c17d0de1 932 /* We finalize local static variables during constructing callgraph
933 edges. Process their attributes too. */
934 process_function_and_variable_attributes (first_processed,
935 first_analyzed_var);
936 first_processed = cgraph_nodes;
1d416bd7 937 first_analyzed_var = varpool_nodes;
938 varpool_analyze_pending_decls ();
aeeb194b 939 cgraph_process_new_functions ();
ae01b312 940 }
2c0b522d 941
aa5e06c7 942 /* Collect entry points to the unit. */
f79b6507 943 if (cgraph_dump_file)
3d7bfc56 944 {
e4200070 945 fprintf (cgraph_dump_file, "Unit entry points:");
c1dcd13c 946 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1a1a827a 947 if (node->needed)
f79b6507 948 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
e4200070 949 fprintf (cgraph_dump_file, "\n\nInitial ");
0785e435 950 dump_cgraph (cgraph_dump_file);
3d7bfc56 951 }
e6d2b2d8 952
f79b6507 953 if (cgraph_dump_file)
954 fprintf (cgraph_dump_file, "\nReclaiming functions:");
ae01b312 955
f4ec5ce1 956 for (node = cgraph_nodes; node != first_analyzed; node = next)
ae01b312 957 {
958 tree decl = node->decl;
f4ec5ce1 959 next = node->next;
ae01b312 960
1a1a827a 961 if (node->local.finalized && !gimple_has_body_p (decl))
a0c938f0 962 cgraph_reset_node (node);
9b8fb23a 963
1a1a827a 964 if (!node->reachable && gimple_has_body_p (decl))
ae01b312 965 {
f79b6507 966 if (cgraph_dump_file)
967 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
b0cdf642 968 cgraph_remove_node (node);
9b8fb23a 969 continue;
ae01b312 970 }
bc5cab3b 971 else
972 node->next_needed = NULL;
1a1a827a 973 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
9b8fb23a 974 gcc_assert (node->analyzed == node->local.finalized);
ae01b312 975 }
f79b6507 976 if (cgraph_dump_file)
e4200070 977 {
978 fprintf (cgraph_dump_file, "\n\nReclaimed ");
979 dump_cgraph (cgraph_dump_file);
980 }
f1c35659 981 bitmap_obstack_release (NULL);
c1dcd13c 982 first_analyzed = cgraph_nodes;
ae01b312 983 ggc_collect ();
aeeb194b 984}
985
8f69fd82 986
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by the front end after the last toplevel
   declaration has been finalized; drives analysis and then the whole
   optimization/expansion pipeline via cgraph_optimize.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.
     (Run again: the analysis above may have finalized more decls.)  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  The second analysis pass picks up
     anything queued by the first one.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
9ed5b1f5 1024
1025
/* Figure out what functions we want to assemble: set NODE->process on
   every node whose body must be emitted.  Members of a COMDAT group
   are marked (and later verified) as a unit.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be pre-marked; process flags are owned by this pass.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* A node already marked here can only have been marked via its
	 COMDAT group, below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* After the loop E is non-NULL iff some call to this node was
	 not inlined, i.e. an offline copy is still called.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a COMDAT group forces out all of
	     them.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Defer the consistency check until all marking is done.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Any unmarked COMDAT-group member with a body at this point was
     not reclaimed when it should have been.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
1122
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  /* The body will be built directly in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Advertise the properties the fresh body already has so the pass
     manager does not attempt to gimplify/lower it again.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1155
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Emits the adjustment statements after *BSI and returns a fresh
   temporary holding the adjusted pointer.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied before
     the vtable lookup ...  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the global pointer-to-vtable-entry type.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  /* ... whereas for a result-adjusting thunk the constant offset is
     applied after the vtable lookup.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1282
/* Produce assembler for thunk NODE.  Uses the target's output_mi_thunk
   hook for the simple this-adjusting case; otherwise builds a real
   GIMPLE body that adjusts the pointer(s) and tail-calls the target.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;	/* Function being thunked to.  */
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Fast path: let the target emit the thunk directly as assembly.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: build a GIMPLE body for the thunk and hand it to
	 the callgraph for normal compilation.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate result: reuse the RESULT_DECL directly.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') is passed through thunk_adjust for
	 a this-adjusting thunk; the rest are forwarded unchanged.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					fold_convert (TREE_TYPE (restmp),
						      integer_zero_node),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  /* Non-NULL result: adjust it.  */
	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* NULL result stays NULL on the else path.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
								integer_zero_node));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1460
/* Expand function specified by NODE: emit its same-body aliases and
   thunks, generate RTL for the body, and release the GIMPLE body.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Find the tail of the same_body list ...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* ... then walk aliases in the order they were created; it is
	 possible that thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
	{
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
      cgraph_process_new_functions ();
    }

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1511
b0cdf642 1512/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
d7c6d889 1513
1514bool
326a9581 1515cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
d7c6d889 1516{
b0cdf642 1517 *reason = e->inline_failed;
1518 return !e->inline_failed;
d7c6d889 1519}
b0cdf642 1520
acc70efa 1521
acc70efa 1522
d9d9733a 1523/* Expand all functions that must be output.
1524
d7c6d889 1525 Attempt to topologically sort the nodes so function is output when
1526 all called functions are already assembled to allow data to be
91c82c20 1527 propagated across the callgraph. Use a stack to get smaller distance
3927afe0 1528 between a function and its callees (later we may choose to use a more
d7c6d889 1529 sophisticated algorithm for function reordering; we will likely want
1530 to use subsections to make the output functions appear in top-down
1531 order). */
1532
1533static void
a6868229 1534cgraph_expand_all_functions (void)
d7c6d889 1535{
1536 struct cgraph_node *node;
4c36ffe6 1537 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
c04e3894 1538 int order_pos, new_order_pos = 0;
d7c6d889 1539 int i;
1540
d7c6d889 1541 order_pos = cgraph_postorder (order);
cc636d56 1542 gcc_assert (order_pos == cgraph_n_nodes);
d7c6d889 1543
7bd28bba 1544 /* Garbage collector may remove inline clones we eliminate during
b0cdf642 1545 optimization. So we must be sure to not reference them. */
1546 for (i = 0; i < order_pos; i++)
09fc9532 1547 if (order[i]->process)
b0cdf642 1548 order[new_order_pos++] = order[i];
1549
1550 for (i = new_order_pos - 1; i >= 0; i--)
d7c6d889 1551 {
1552 node = order[i];
09fc9532 1553 if (node->process)
d7c6d889 1554 {
cc636d56 1555 gcc_assert (node->reachable);
09fc9532 1556 node->process = 0;
d7c6d889 1557 cgraph_expand_function (node);
1558 }
1559 }
523c1122 1560 cgraph_process_new_functions ();
773c5ba7 1561
d7c6d889 1562 free (order);
773c5ba7 1563
d7c6d889 1564}
1565
/* This is used to sort the node types by the cgraph order number.
   Discriminator for the union in struct cgraph_order_sort.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any entity.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph (function) node.  */
  ORDER_VAR,		/* Slot holds a varpool (variable) node.  */
  ORDER_ASM		/* Slot holds a toplevel asm node.  */
};
1575
/* One toplevel entity (function, variable or asm statement) tagged by
   kind; indexed by cgraph order number in cgraph_output_in_order.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;  /* Which member of U is valid.  */
  union
  {
    struct cgraph_node *f;	   /* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	   /* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	   /* Valid when kind == ORDER_ASM.  */
  } u;
};
1586
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Order numbers are unique, so a flat array indexed by order number
     holds at most one entity per slot (asserted below).  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Bucket the functions marked for output by their order number.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Bucket the queued variables.  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* Bucket the toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
	{
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1673
b0cdf642 1674/* Return true when function body of DECL still needs to be kept around
1675 for later re-use. */
1676bool
1677cgraph_preserve_function_body_p (tree decl)
1678{
1679 struct cgraph_node *node;
8d8c4c8d 1680
1681 gcc_assert (cgraph_global_info_ready);
b0cdf642 1682 /* Look if there is any clone around. */
ccf4ab6b 1683 node = cgraph_node (decl);
1684 if (node->clones)
1685 return true;
b0cdf642 1686 return false;
1687}
1688
/* Run the interprocedural (IPA) pass lists: small IPA passes, summary
   generation, LTO stream-out when requested, and the regular IPA
   passes.  The in_lto_p / flag_ltrans checks select which stages run
   in a normal compile versus an LTO read-back compile.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the whole program, not a single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In a WPA->LTRANS split the regular IPA passes already ran at WPA
     time, so skip them in the ltrans stage.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1739
34e5cced 1740
/* Perform simple optimizations based on callgraph.  Top-level driver:
   runs the IPA passes, decides which functions to output, and expands
   them to assembly, moving cgraph_state forward as it goes.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder emits entities in source order; otherwise
     use the topologically sorted expansion.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
34e5cced 1848
121f3051 1849void
1850init_cgraph (void)
1851{
01ec0a6c 1852 if (!cgraph_dump_file)
1853 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
121f3051 1854}
b5d36404 1855
a0c938f0 1856/* The edges representing the callers of the NEW_VERSION node were
b5d36404 1857 fixed by cgraph_function_versioning (), now the call_expr in their
1858 respective tree code should be updated to call the NEW_VERSION. */
1859
1860static void
1861update_call_expr (struct cgraph_node *new_version)
1862{
1863 struct cgraph_edge *e;
1864
1865 gcc_assert (new_version);
75a70cf9 1866
1867 /* Update the call expr on the edges to call the new version. */
b5d36404 1868 for (e = new_version->callers; e; e = e->next_caller)
e03a95e7 1869 {
1870 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1871 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
e38def9c 1872 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
e03a95e7 1873 }
b5d36404 1874}
1875
1876
/* Create a new cgraph node which is the new version of
   OLD_VERSION node.  REDIRECT_CALLERS holds the callers
   edges which should be redirected to point to
   NEW_VERSION.  ALL the callees edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   were copied, to prevent duplication of calls that are dead
   in the clone.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
				 tree new_decl,
				 VEC(cgraph_edge_p,heap) *redirect_callers,
				 bitmap bbs_to_copy)
 {
   struct cgraph_node *new_version;
   struct cgraph_edge *e;
   unsigned i;

   gcc_assert (old_version);

   new_version = cgraph_node (new_decl);

   /* Copy flags/info from the old node; the new version is a local,
      non-exported entity.  */
   new_version->analyzed = true;
   new_version->local = old_version->local;
   new_version->local.externally_visible = false;
   new_version->local.local = true;
   new_version->local.vtable_method = false;
   new_version->global = old_version->global;
   new_version->rtl = old_version->rtl;
   new_version->reachable = true;
   new_version->count = old_version->count;

   /* Clone direct callee edges, skipping calls in blocks that were
      not copied into the clone.  */
   for (e = old_version->callees; e; e=e->next_callee)
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
       cgraph_clone_edge (e, new_version, e->call_stmt,
			  e->lto_stmt_uid, REG_BR_PROB_BASE,
			  CGRAPH_FREQ_BASE,
			  e->loop_nest, true);
   /* Likewise for indirect call edges.  */
   for (e = old_version->indirect_calls; e; e=e->next_callee)
     if (!bbs_to_copy
	 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
       cgraph_clone_edge (e, new_version, e->call_stmt,
			  e->lto_stmt_uid, REG_BR_PROB_BASE,
			  CGRAPH_FREQ_BASE,
			  e->loop_nest, true);
   FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
     {
       /* Redirect calls to the old version node to point to its new
	  version.  */
       cgraph_redirect_edge_callee (e, new_version);
     }

   return new_version;
 }
1935
1936 /* Perform function versioning.
a0c938f0 1937 Function versioning includes copying of the tree and
b5d36404 1938 a callgraph update (creating a new cgraph node and updating
1939 its callees and callers).
1940
1941 REDIRECT_CALLERS varray includes the edges to be redirected
1942 to the new version.
1943
1944 TREE_MAP is a mapping of tree nodes we want to replace with
1945 new ones (according to results of prior analysis).
1946 OLD_VERSION_NODE is the node that is versioned.
48e1416a 1947 It returns the new version's cgraph node.
b06ab5fa 1948 If non-NULL ARGS_TO_SKIP determine function parameters to remove
1949 from new version.
1950 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
1951 If non_NULL NEW_ENTRY determine new entry BB of the clone. */
b5d36404 1952
1953struct cgraph_node *
1954cgraph_function_versioning (struct cgraph_node *old_version_node,
4460a647 1955 VEC(cgraph_edge_p,heap) *redirect_callers,
ccf4ab6b 1956 VEC (ipa_replace_map_p,gc)* tree_map,
a70a5e2c 1957 bitmap args_to_skip,
b06ab5fa 1958 bitmap bbs_to_copy,
1959 basic_block new_entry_block,
a70a5e2c 1960 const char *clone_name)
b5d36404 1961{
1962 tree old_decl = old_version_node->decl;
1963 struct cgraph_node *new_version_node = NULL;
1964 tree new_decl;
1965
1966 if (!tree_versionable_function_p (old_decl))
1967 return NULL;
1968
1969 /* Make a new FUNCTION_DECL tree node for the
1970 new version. */
5afe38fe 1971 if (!args_to_skip)
1972 new_decl = copy_node (old_decl);
1973 else
1974 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
b5d36404 1975
df0b8dfb 1976 /* Generate a new name for the new version. */
1977 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
1978 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1979 SET_DECL_RTL (new_decl, NULL);
1980
b5d36404 1981 /* Create the new version's call-graph node.
1982 and update the edges of the new node. */
1983 new_version_node =
1984 cgraph_copy_node_for_versioning (old_version_node, new_decl,
b06ab5fa 1985 redirect_callers, bbs_to_copy);
b5d36404 1986
1987 /* Copy the OLD_VERSION_NODE function tree to the new version. */
b06ab5fa 1988 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
1989 bbs_to_copy, new_entry_block);
b5d36404 1990
a0c938f0 1991 /* Update the new version's properties.
e03a95e7 1992 Make The new version visible only within this translation unit. Make sure
1993 that is not weak also.
a0c938f0 1994 ??? We cannot use COMDAT linkage because there is no
b5d36404 1995 ABI support for this. */
6137cc9f 1996 cgraph_make_decl_local (new_version_node->decl);
f014e39d 1997 DECL_VIRTUAL_P (new_version_node->decl) = 0;
b5d36404 1998 new_version_node->local.externally_visible = 0;
1999 new_version_node->local.local = 1;
2000 new_version_node->lowered = true;
f014e39d 2001
e03a95e7 2002 /* Update the call_expr on the edges to call the new version node. */
2003 update_call_expr (new_version_node);
48e1416a 2004
50828ed8 2005 cgraph_call_function_insertion_hooks (new_version_node);
b5d36404 2006 return new_version_node;
2007}
469679ab 2008
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.

   NODE is the master node; its first clone takes over the original DECL
   (becoming the new master of the remaining clones) while NODE keeps a
   private copy of the body.  Returns the promoted first clone.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give the first clone its own DECL and register it so lookups by that
     DECL find the clone rather than NODE.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent NODE's remaining sibling clones under FIRST_CLONE:
	 walk to the end of the sibling chain setting clone_of, then
	 splice the chain onto the front of FIRST_CLONE's own clone
	 list.  N ends up pointing at the last sibling.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Walk the whole clone tree below FIRST_CLONE (depth first via
     clones/next_sibling_clone, backing up through clone_of) and point
     every clone that still referenced NODE's DECL at the new one.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
	gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
			    NULL, NULL);

  /* The promoted clone is a local, non-COMDAT definition.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  /* Pending IPA transforms were meant for NODE's body, not the copy.  */
  VEC_free (ipa_opt_pass, heap,
	    first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
a861fe52 2076
/* Given virtual clone, turn it into actual clone: copy the body from the
   clone origin, unlink NODE from the origin's clone list, and release the
   origin's body if it is no longer needed.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
#ifdef ENABLE_CHECKING
  /* Record what NODE was cloned from for later verification, preferring
     the oldest recorded ancestor.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
#endif
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
  			    node->clone.tree_map, true,
			    node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  Unlink NODE from the doubly-linked
     sibling-clone list of its origin.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      /* The origin is neither needed itself nor by remaining clones;
	 free its body, outgoing edges and IPA references.  */
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2115
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Returns
   the (possibly new) call statement; when the callee skips arguments the
   original statement is replaced by a copy with those arguments
   removed.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or calls already targeting the
     callee's decl (including through a same-body alias).  */
  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  /* The current target must not itself be an args-skipping clone;
     otherwise the argument lists would already disagree.  */
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      gimple_stmt_iterator gsi;

      /* Build a copy of the call without the skipped arguments and
	 swap it in for the original statement.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      /* The copied statement now defines the virtual operand; keep the
	 SSA def-stmt link consistent.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, true);
    }
  else
    {
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  /* Keep the call statements of all clones of this call site in sync.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, " updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2188
ccf4ab6b 2189/* Once all functions from compilation unit are in memory, produce all clones
c596d830 2190 and update all calls. We might also do this on demand if we don't want to
2191 bring all functions to memory prior compilation, but current WHOPR
2192 implementation does that and it is is bit easier to keep everything right in
2193 this order. */
ccf4ab6b 2194void
2195cgraph_materialize_all_clones (void)
2196{
2197 struct cgraph_node *node;
2198 bool stabilized = false;
2199
2200 if (cgraph_dump_file)
2201 fprintf (cgraph_dump_file, "Materializing clones\n");
2202#ifdef ENABLE_CHECKING
2203 verify_cgraph ();
2204#endif
2205
2206 /* We can also do topological order, but number of iterations should be
2207 bounded by number of IPA passes since single IPA pass is probably not
2208 going to create clones of clones it created itself. */
2209 while (!stabilized)
2210 {
2211 stabilized = true;
2212 for (node = cgraph_nodes; node; node = node->next)
2213 {
2214 if (node->clone_of && node->decl != node->clone_of->decl
2215 && !gimple_has_body_p (node->decl))
2216 {
2217 if (gimple_has_body_p (node->clone_of->decl))
2218 {
2219 if (cgraph_dump_file)
e20422ea 2220 {
2221 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2222 cgraph_node_name (node->clone_of),
2223 cgraph_node_name (node));
2224 if (node->clone.tree_map)
2225 {
2226 unsigned int i;
2227 fprintf (cgraph_dump_file, " replace map: ");
2228 for (i = 0; i < VEC_length (ipa_replace_map_p,
2229 node->clone.tree_map);
2230 i++)
2231 {
2232 struct ipa_replace_map *replace_info;
2233 replace_info = VEC_index (ipa_replace_map_p,
2234 node->clone.tree_map,
2235 i);
2236 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2237 fprintf (cgraph_dump_file, " -> ");
2238 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2239 fprintf (cgraph_dump_file, "%s%s;",
2240 replace_info->replace_p ? "(replace)":"",
2241 replace_info->ref_p ? "(ref)":"");
2242 }
2243 fprintf (cgraph_dump_file, "\n");
2244 }
2245 if (node->clone.args_to_skip)
2246 {
2247 fprintf (cgraph_dump_file, " args_to_skip: ");
2248 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2249 }
2250 if (node->clone.args_to_skip)
2251 {
2252 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2253 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2254 }
2255 }
ccf4ab6b 2256 cgraph_materialize_clone (node);
a510bd8d 2257 stabilized = false;
ccf4ab6b 2258 }
ccf4ab6b 2259 }
2260 }
2261 }
ee3f5fc0 2262 for (node = cgraph_nodes; node; node = node->next)
2263 if (!node->analyzed && node->callees)
2264 cgraph_node_remove_callees (node);
c596d830 2265 if (cgraph_dump_file)
2266 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
947781ac 2267#ifdef ENABLE_CHECKING
2268 verify_cgraph ();
2269#endif
ccf4ab6b 2270 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2271}
2272
a861fe52 2273#include "gt-cgraphunit.h"